Using the rtmp_with_capture Plugin for Real-Time Streaming Capture and RTMP Push in Flutter

Posted 1 week ago by h691938207 in Flutter

rtmp_with_capture

A Flutter package for basic live video streaming to an RTMP server. Capturing still images during streaming is also supported.


Getting Started

This plugin is a simplified version of camera_with_rtmp, which is itself an extension of the Flutter camera plugin that adds RTMP streaming. On top of that, a takePhoto method has been added so you can capture still images while streaming over RTMP. Android and iOS are supported (the web is not). A minimal usage sketch follows the feature list below.


Features:

  • Display a live camera preview in a widget.
  • Push the video stream to an RTMP server.
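
A minimal sketch of the core flow, based on the API used in the complete example later in this post (CameraController and CameraPreview from package:rtmp_with_capture/camera.dart, startVideoStreaming, takePhoto, and the takePhotoCallback1 hook). The RTMP URL and the function name are placeholders:

import 'package:flutter/material.dart';
import 'package:rtmp_with_capture/camera.dart';

// Minimal flow: initialize a controller, push to RTMP, grab a still frame, stop.
// In a widget tree the live preview is shown with CameraPreview(controller).
Future<void> minimalStreamingFlow(List<CameraDescription> cameras) async {
  final controller = CameraController(
    cameras.first,
    ResolutionPreset.high,
    // Invoked with the captured image whenever takePhoto() is called.
    takePhotoCallback1: (image) {
      debugPrint('captured a still frame while streaming');
    },
  );
  await controller.initialize();

  await controller.startVideoStreaming('rtmp://example.com/live/streamKey');
  await controller.takePhoto();        // capture an image during the stream
  await controller.stopVideoStreaming();
  await controller.dispose();
}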

iOS

Add two entries to the ios/Runner/Info.plist file:

  • One with the key Privacy - Camera Usage Description and a usage description string.
  • One with the key Privacy - Microphone Usage Description and a usage description string.

Or add them directly in text form:

<key>NSCameraUsageDescription</key>
<string>Can I use the camera please?</string>
<key>NSMicrophoneUsageDescription</key>
<string>Can I use the mic please?</string>

Android

In your android/app/build.gradle file, raise the minimum Android SDK version to 21 (or higher).

minSdkVersion 21

You also need a packagingOptions section that excludes one file, otherwise the Gradle build fails.

packagingOptions {
   exclude 'project.clj'
}

Complete example code

The complete example below shows how to use the rtmp_with_capture plugin for live capture and RTMP streaming.

import 'dart:async';
import 'dart:typed_data';

import 'package:flutter/material.dart';
import 'package:rtmp_with_capture/camera.dart';
import 'package:video_player/video_player.dart';
import 'package:wakelock/wakelock.dart';

class CameraExampleHome extends StatefulWidget {
  const CameraExampleHome({Key? key}) : super(key: key);

  @override
  _CameraExampleHomeState createState() {
    return _CameraExampleHomeState();
  }
}

/// Returns a suitable camera icon for [direction].
IconData getCameraLensIcon(CameraLensDirection direction) {
  switch (direction) {
    case CameraLensDirection.back:
      return Icons.camera_rear;
    case CameraLensDirection.front:
      return Icons.camera_front;
    case CameraLensDirection.external:
      return Icons.camera;
  }
}

void logError(String code, String message) =>
    print('Error: $code\nError Message: $message');

class _CameraExampleHomeState extends State<CameraExampleHome>
    with WidgetsBindingObserver, TickerProviderStateMixin {
  CameraController? controller;
  String? imagePath;
  String? videoPath;
  String? url;
  VideoPlayerController? videoController;
  late VoidCallback videoPlayerListener;
  bool enableAudio = true;
  bool useOpenGL = true;
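  // Replace with your own RTMP ingest URL and stream key.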
  String streamURL =
      "rtmp://3.35.108.14/channel2/4e597b66-74cc-4bbd-adc4-290c7fc3b809";
  bool streaming = false;
  String? cameraDirection;

  Timer? _timer;

  @override
  void initState() {
    super.initState();
    // Register as an observer so didChangeAppLifecycleState receives lifecycle events.
    WidgetsBinding.instance.addObserver(this);
    _initialize();
  }

  @override
  void dispose() {
    // Release the camera and any pending timer when the widget is removed.
    WidgetsBinding.instance.removeObserver(this);
    _timer?.cancel();
    controller?.dispose();
    super.dispose();
  }

  Future<void> _initialize() async {
    print("initialized");
    streaming = false;
    cameraDirection = 'front';
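    // takePhotoCallback1 is called with the captured image each time takePhoto() runs.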
    controller = CameraController(
      cameras[1],
      ResolutionPreset.high,
      takePhotoCallback1: (image) {
        _showDialog(image, context);
      },
    );
    await controller!.initialize();
    if (!mounted) {
      return;
    }
    setState(() {});
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    if (controller == null || !controller!.value.isInitialized) {
      return;
    }
    if (state == AppLifecycleState.inactive) {
      controller?.dispose();
      if (_timer != null) {
        _timer!.cancel();
        _timer = null;
      }
    } else if (state == AppLifecycleState.resumed) {
      if (controller != null) {
        onNewCameraSelected(controller!.description!);
      }
    }
  }

  final GlobalKey<ScaffoldState> _scaffoldKey = GlobalKey<ScaffoldState>();

  Future<void> toggleCameraDirection() async {
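    // cameras[0] is assumed to be the back camera and cameras[1] the front camera.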
    if (cameraDirection == 'front') {
      if (controller != null) {
        await controller?.dispose();
      }
      controller = CameraController(
        cameras[0],
        ResolutionPreset.high,
        enableAudio: enableAudio,
        androidUseOpenGL: useOpenGL,
        takePhotoCallback1: (image) {
          _showDialog(image, context);
        },
      );

      controller!.addListener(() {
        if (mounted) setState(() {});
        if (controller!.value.hasError) {
          showInSnackBar('Camera error ${controller!.value.errorDescription}');
          if (_timer != null) {
            _timer!.cancel();
            _timer = null;
          }
          Wakelock.disable();
        }
      });

      try {
        await controller!.initialize();
      } on CameraException catch (e) {
        _showCameraException(e);
      }

      if (mounted) {
        setState(() {});
      }
      cameraDirection = 'back';
    } else {
      if (controller != null) {
        await controller!.dispose();
      }
      controller = CameraController(
        cameras[1],
        ResolutionPreset.high,
        enableAudio: enableAudio,
        androidUseOpenGL: useOpenGL,
        takePhotoCallback1: (image) {
          _showDialog(image, context);
        },
      );

      controller!.addListener(() {
        if (mounted) setState(() {});
        if (controller!.value.hasError) {
          showInSnackBar('Camera error ${controller!.value.errorDescription}');
          if (_timer != null) {
            _timer!.cancel();
            _timer = null;
          }
          Wakelock.disable();
        }
      });

      try {
        await controller!.initialize();
      } on CameraException catch (e) {
        _showCameraException(e);
      }

      if (mounted) {
        setState(() {});
      }
      cameraDirection = 'front';
    }
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      extendBodyBehindAppBar: true,
      resizeToAvoidBottomInset: true,
      key: _scaffoldKey,
      body: SingleChildScrollView(
        child: SizedBox(
          height: MediaQuery.of(context).size.height,
          child: Stack(
            children: <Widget>[
              Container(
                color: Colors.black,
                child: Center(
                  child: _cameraPreviewWidget(),
                ),
              ),
              Positioned(
                top: 0.0,
                left: 0.0,
                right: 0.0,
                child: AppBar(
                  backgroundColor: Colors.transparent,
                  elevation: 0.0,
                  title: streaming
                      ? ElevatedButton(
                          onPressed: () => onStopButtonPressed(),
                          style: ButtonStyle(
                              backgroundColor:
                                  MaterialStateProperty.all<Color>(Colors.red)),
                          child: Row(
                            mainAxisSize: MainAxisSize.min,
                            mainAxisAlignment: MainAxisAlignment.center,
                            children: const [
                              Icon(Icons.videocam_off),
                              SizedBox(width: 10),
                              Text(
                                'End Stream',
                                style: TextStyle(
                                  fontSize: 20.0,
                                  fontWeight: FontWeight.bold,
                                  decoration: TextDecoration.underline,
                                ),
                              ),
                            ],
                          ),
                        )
                      : ElevatedButton(
                          onPressed: () => onVideoStreamingButtonPressed(),
                          style: ButtonStyle(
                              backgroundColor: MaterialStateProperty.all<Color>(
                                  Colors.blue)),
                          child: Row(
                            mainAxisSize: MainAxisSize.min,
                            mainAxisAlignment: MainAxisAlignment.center,
                            children: const [
                              Icon(Icons.videocam),
                              SizedBox(width: 10),
                              Text(
                                'Start Stream',
                                style: TextStyle(
                                  fontSize: 20.0,
                                  fontWeight: FontWeight.bold,
                                  decoration: TextDecoration.underline,
                                ),
                              ),
                            ],
                          ),
                        ),
                  actions: [
                    Padding(
                      padding: const EdgeInsets.all(10.0),
                      child: IconButton(
                        color: Theme.of(context).primaryColor,
                        icon: const Icon(Icons.switch_video),
                        tooltip: 'Switch Camera',
                        onPressed: () {
                          toggleCameraDirection();
                        },
                      ),
                    ),
                  ],
                ),
              ),
              Positioned(
                  top: 200,
                  child: ElevatedButton(
                    onPressed: () async {
                      print("onpressed!");
                      await controller!.takePhoto();
                    },
                    child: Text("capture"),
                  ))
            ],
          ),
        ),
      ),
    );
  }

  /// Display the preview from the camera (or a message if the preview is not available).
  Widget _cameraPreviewWidget() {
    if (controller == null || !controller!.value.isInitialized) {
      return const Text(
        'Tap a camera',
        style: TextStyle(
          color: Colors.white,
          fontSize: 24.0,
          fontWeight: FontWeight.w900,
        ),
      );
    } else {
      return AspectRatio(
        aspectRatio: controller!.value.aspectRatio,
        child: CameraPreview(controller!),
      );
    }
  }

  String timestamp() => DateTime.now().millisecondsSinceEpoch.toString();

  void showInSnackBar(String message) {
    ScaffoldMessenger.of(context)
        .showSnackBar(SnackBar(content: Text(message)));
  }

  Future<void> onNewCameraSelected(CameraDescription? cameraDescription) async {
    if (controller != null) {
      await controller!.dispose();
    }
    if (cameraDescription == null) {
      print('cameraDescription is null');
      return;
    }
    controller = CameraController(
      cameraDescription,
      ResolutionPreset.medium,
      enableAudio: enableAudio,
      androidUseOpenGL: useOpenGL,
      takePhotoCallback1: (image) {
        print("call back!!");
      },
    );

    controller!.addListener(() {
      if (mounted) setState(() {});
      if (controller!.value.hasError) {
        showInSnackBar('Camera error ${controller!.value.errorDescription}');
        if (_timer != null) {
          _timer!.cancel();
          _timer = null;
        }
        Wakelock.disable();
      }
    });

    try {
      await controller!.initialize();
    } on CameraException catch (e) {
      _showCameraException(e);
    }

    if (mounted) {
      setState(() {});
    }
  }

  Future<void> onVideoStreamingButtonPressed() async {
    startVideoStreaming().then((url) {
      if (mounted) {
        setState(() {
          streaming = true;
        });
      }
      if (url != null && url.isNotEmpty) showInSnackBar('Streaming video to $url');
      Wakelock.enable();
    });
  }

  void onStopButtonPressed() {
    stopVideoStreaming().then((_) {
      if (mounted) {
        setState(() {
          streaming = false;
        });
      }
      showInSnackBar('Stopped streaming to $url');
    });
    Wakelock.disable();
  }

  void onPauseStreamingButtonPressed() {
    pauseVideoStreaming().then((_) {
      if (mounted) setState(() {});
      showInSnackBar('Streaming paused');
    });
  }

  void onResumeStreamingButtonPressed() {
    resumeVideoStreaming().then((_) {
      if (mounted) setState(() {});
      showInSnackBar('Streaming resumed');
    });
  }

  Future<String?> startVideoStreaming() async {
    if (!controller!.value.isInitialized) {
      showInSnackBar('Error: select a camera first.');
      return null;
    }

    String myUrl = streamURL;

    try {
      if (_timer != null) {
        _timer!.cancel();
        _timer = null;
      }
      url = myUrl;
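      // Start pushing to the RTMP server; width and height set the encoded resolution.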
      await controller!.startVideoStreaming(url!,
          androidUseOpenGL: false, width: 480, height: 640);
    } on CameraException catch (e) {
      _showCameraException(e);
      return null;
    }
    return url;
  }

  Future<void> stopVideoStreaming() async {
    try {
      await controller!.stopVideoStreaming();
      if (_timer != null) {
        _timer!.cancel();
        _timer = null;
      }
    } on CameraException catch (e) {
      _showCameraException(e);
      return;
    }
  }

  Future<void> pauseVideoStreaming() async {
    if (!controller!.value.isStreamingVideoRtmp) {
      return;
    }

    try {
      await controller!.pauseVideoStreaming();
    } on CameraException catch (e) {
      _showCameraException(e);
      rethrow;
    }
  }

  Future<void> resumeVideoStreaming() async {
    try {
      await controller!.resumeVideoStreaming();
    } on CameraException catch (e) {
      _showCameraException(e);
      rethrow;
    }
  }

  void _showCameraException(CameraException e) {
    logError(e.code, e.description);
    showInSnackBar('Error: ${e.code}\n${e.description}');
  }

  void _showDialog(Image image, BuildContext context) {
    showDialog(
      context: context,
      builder: (context) => AlertDialog(actions: [image]),
    );
  }
}

class CameraApp extends StatelessWidget {
  const CameraApp({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return const MaterialApp(
      home: CameraExampleHome(),
    );
  }
}

List<CameraDescription> cameras = [];

Future<void> main() async {
  try {
    WidgetsFlutterBinding.ensureInitialized();
    cameras = await availableCameras();
  } on CameraException catch (e) {
    logError(e.code, e.description);
  }
  runApp(const CameraApp());
}

More hands-on tutorials about using the rtmp_with_capture plugin for real-time streaming capture and RTMP push in Flutter are available at https://www.itying.com/category-92-b0.html

1 Reply

To do real-time streaming capture and RTMP push in Flutter you can use the rtmp_with_capture plugin, which combines video capture with RTMP streaming and is well suited to live-broadcast scenarios. The steps below show how to use it.

1. Add the dependency

First, add the rtmp_with_capture dependency to your pubspec.yaml file:

dependencies:
  flutter:
    sdk: flutter
  rtmp_with_capture: ^1.0.0  # check for the latest version

Then run flutter pub get to install the dependency.

2. Configure permissions

On both Android and iOS you need to declare permissions to access the camera and microphone.

Android

Add the following permissions to AndroidManifest.xml:

<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />

iOS

Add the following entries to Info.plist:

<key>NSCameraUsageDescription</key>
<string>We need access to your camera for live streaming.</string>
<key>NSMicrophoneUsageDescription</key>
<string>We need access to your microphone for live streaming.</string>

3. Use the rtmp_with_capture plugin

The simple example below shows how to use the rtmp_with_capture plugin for video capture and RTMP streaming.

import 'package:flutter/material.dart';
import 'package:rtmp_with_capture/rtmp_with_capture.dart';

void main() {
  runApp(MyApp());
}

class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: RtmpStreamingScreen(),
    );
  }
}

class RtmpStreamingScreen extends StatefulWidget {
  @override
  _RtmpStreamingScreenState createState() => _RtmpStreamingScreenState();
}

class _RtmpStreamingScreenState extends State<RtmpStreamingScreen> {
  final RtmpWithCapture _rtmpWithCapture = RtmpWithCapture();
  bool _isStreaming = false;

  @override
  void initState() {
    super.initState();
    _rtmpWithCapture.initialize();
  }

  @override
  void dispose() {
    _rtmpWithCapture.dispose();
    super.dispose();
  }

  Future<void> _startStreaming() async {
    try {
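      // The settings below (resolution, bitrates, frame rate) are illustrative; tune them for your server and network.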
      await _rtmpWithCapture.startStream(
        rtmpUrl: "rtmp://your-rtmp-server-url/live/streamkey",
        videoWidth: 1280,
        videoHeight: 720,
        videoBitrate: 1200,
        audioBitrate: 64,
        frameRate: 30,
      );
      setState(() {
        _isStreaming = true;
      });
    } catch (e) {
      print("Failed to start streaming: $e");
    }
  }

  Future<void> _stopStreaming() async {
    try {
      await _rtmpWithCapture.stopStream();
      setState(() {
        _isStreaming = false;
      });
    } catch (e) {
      print("Failed to stop streaming: $e");
    }
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text('RTMP Streaming'),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            _isStreaming
                ? Text('Streaming is live')
                : Text('Press the button to start streaming'),
            SizedBox(height: 20),
            ElevatedButton(
              onPressed: _isStreaming ? _stopStreaming : _startStreaming,
              child: Text(_isStreaming ? 'Stop Streaming' : 'Start Streaming'),
            ),
          ],
        ),
      ),
    );
  }
}