Using the ffmpeg_wasm video processing plugin in Flutter

Posted 1 week ago by itying888, in Flutter

Introduction to ffmpeg_wasm

ffmpeg.wasm is a plugin for Flutter Web that lets developers use FFmpeg to process video and audio directly in a web application. A complete example is shown in the sample code section below.

Installation

Development environment configuration

HTML file configuration

Add the following script inside the <head> tag of your project's index.html:

<script src="https://unpkg.com/@ffmpeg/ffmpeg@0.11.6/dist/ffmpeg.min.js" crossorigin="anonymous" async></script>

Run command

Run the following command in a terminal to start the project:

flutter run -d chrome --web-browser-flag --enable-features=SharedArrayBuffer

Android Studio configuration

Go to Run -> Edit Configurations... -> Create / edit your Flutter Configuration -> Additional run args and add:

--web-browser-flag --enable-features=SharedArrayBuffer

Visual Studio Code configuration

Edit or create the launch.json file:

{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Example",
      "request": "launch",
      "type": "dart",
      "args": [
        "--web-browser-flag",
        "--enable-features=SharedArrayBuffer"
      ]
    }
  ]
}

Production environment configuration

As in development, add the following script inside the <head> tag of index.html:

<script src="https://unpkg.com/@ffmpeg/ffmpeg@0.11.6/dist/ffmpeg.min.js" crossorigin="anonymous" async></script>

Make sure index.html is served with the following HTTP response headers:

Cross-Origin-Embedder-Policy: require-corp
Cross-Origin-Opener-Policy: same-origin

For Firebase Hosting, add the following to firebase.json:

{
  "hosting": {
     "headers": [
        {
          "source": "**",
          "headers": [
            {
              "key": "Cross-Origin-Embedder-Policy",
              "value": "require-corp"
            },
            {
              "key": "Cross-Origin-Opener-Policy",
              "value": "same-origin"
            }
          ]
        }
     ]
  }
}

For a Node.js Express app:

app.use(express.static(staticDir, {
  setHeaders: (res, filePath) => {
    const fileName = path.basename(filePath);

    if (fileName === 'index.html') {
      res.setHeader('Cross-Origin-Embedder-Policy', 'require-corp');
      res.setHeader('Cross-Origin-Opener-Policy', 'same-origin');
    }
  }
}));
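
If you serve the built web app from a Dart server instead, the same two headers can be attached with the shelf package. This is only a minimal sketch, assuming the shelf and shelf_static packages and that the Flutter build output lives in build/web:

import 'package:shelf/shelf.dart';
import 'package:shelf/shelf_io.dart' as io;
import 'package:shelf_static/shelf_static.dart';

// Middleware that adds the COOP/COEP headers required for SharedArrayBuffer.
Middleware crossOriginIsolationHeaders() {
  return (Handler inner) {
    return (Request request) async {
      final response = await inner(request);
      return response.change(headers: {
        'Cross-Origin-Embedder-Policy': 'require-corp',
        'Cross-Origin-Opener-Policy': 'same-origin',
      });
    };
  };
}

Future<void> main() async {
  final handler = const Pipeline()
      .addMiddleware(crossOriginIsolationHeaders())
      .addHandler(createStaticHandler('build/web', defaultDocument: 'index.html'));

  await io.serve(handler, 'localhost', 8080);
}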

Usage

Creating an FFmpeg instance

FFmpeg ffmpeg = createFFmpeg(CreateFFmpegParam(log: true, corePath: 'https://unpkg.com/@ffmpeg/core@0.11.0/dist/ffmpeg-core.js'));

If you want to load the core from a local path (for example, core files placed under the project's web/ffmpeg/ directory so they are served alongside the app):

final url = Uri.base.resolve('ffmpeg/ffmpeg-core.js').toString();
FFmpeg ffmpeg = createFFmpeg(CreateFFmpegParam(corePath: url));

Single-threaded core (does not require SharedArrayBuffer):

FFmpeg ffmpeg = createFFmpeg(CreateFFmpegParam(
  log: true,
  corePath: 'https://unpkg.com/@ffmpeg/core-st@0.11.1/dist/ffmpeg-core.js',
  mainName: 'main',
));

Using the FFmpeg instance

Future<Uint8List> exportVideo(Uint8List input) async {
  FFmpeg? ffmpeg;
  try {
    ffmpeg = createFFmpeg(CreateFFmpegParam(log: true));
    ffmpeg.setLogger(_onLogHandler);
    ffmpeg.setProgress(_onProgressHandler);

    if (!ffmpeg.isLoaded()) {
      await ffmpeg.load();
    }

    const inputFile = 'input.mp4';
    const outputFile = 'output.mp4';

    ffmpeg.writeFile(inputFile, input);

    await ffmpeg.runCommand('-i $inputFile -s 1920x1080 $outputFile');

    final data = ffmpeg.readFile(outputFile);
    return data;
  } finally {
    ffmpeg?.exit();
  }
}

void _onProgressHandler(ProgressParam progress) {
  print('Progress: ${progress.ratio * 100}%');
}

// Matches FFmpeg's stderr statistics lines, e.g.
// "frame=  120 fps= 30 q=28.0 size=  256kB time=00:00:04.00 bitrate= 524.3kbits/s speed=1.2x"
final regex = RegExp(
  r'frame\s*=\s*(\d+)\s+fps\s*=\s*(\d+(?:\.\d+)?)\s+q\s*=\s*([\d.-]+)\s+L?size\s*=\s*(\d+)\w*\s+time\s*=\s*([\d:\.]+)\s+bitrate\s*=\s*([\d.]+)\s*(\w+)/s\s+speed\s*=\s*([\d.]+)x',
);

void _onLogHandler(LoggerParam logger) {
  if (logger.type == 'fferr') {
    final match = regex.firstMatch(logger.message);

    if (match != null) {
      final frame = match.group(1);
      final fps = match.group(2);
      final q = match.group(3);
      final size = match.group(4);
      final time = match.group(5);
      final bitrate = match.group(6);
      final bitrateUnit = match.group(7);
      final speed = match.group(8);

      print('frame: $frame, fps: $fps, q: $q, size: $size, time: $time, bitrate: $bitrate$bitrateUnit, speed: $speed');
    }
  }
}
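
To tie these pieces together, here is a minimal sketch of how exportVideo might be called. The helper name convertPickedVideo is hypothetical; it assumes the file_picker package, which is also used in the full example below:

import 'dart:typed_data';
import 'package:file_picker/file_picker.dart';

Future<void> convertPickedVideo() async {
  // Let the user pick a video file in the browser.
  final result = await FilePicker.platform.pickFiles(type: FileType.video);
  final Uint8List? bytes = result?.files.single.bytes;
  if (bytes == null) return;

  // Run the conversion defined above; the returned bytes can then be
  // downloaded (see the webSaveAs helper used in the full example) or uploaded.
  final Uint8List output = await exportVideo(bytes);
  print('Converted video size: ${output.length} bytes');
}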

Example code

Below is a complete Flutter Web application that shows how to use the ffmpeg_wasm plugin for video processing:

import 'dart:html' as html;
import 'dart:js' as js;
import 'dart:typed_data';
import 'package:ffmpeg_wasm/ffmpeg_wasm.dart';
import 'package:file_picker/file_picker.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';

void main() {
  runApp(const MyApp());
}

class MyApp extends StatelessWidget {
  const MyApp({super.key});

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'FFmpeg - WASM Flutter Demo',
      theme: ThemeData(
        primarySwatch: Colors.indigo,
      ),
      home: const MyHomePage(title: 'FFmpeg - WASM'),
    );
  }
}

class MyHomePage extends StatefulWidget {
  const MyHomePage({super.key, required this.title});

  final String title;

  @override
  State<MyHomePage> createState() => _MyHomePageState();
}

class _MyHomePageState extends State<MyHomePage> {
  late FFmpeg ffmpeg;
  bool isLoaded = false;
  String? selectedFile;
  String? conversionStatus;

  FilePickerResult? filePickerResult;

  final progress = ValueNotifier<double?>(null);
  final statistics = ValueNotifier<String?>(null);

  late Future<List<Uint8List>> dashFramesFuture;

  @override
  void initState() {
    dashFramesFuture = _genDashFrames();
    super.initState();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text(widget.title),
      ),
      body: Center(
        child: SingleChildScrollView(
          child: Column(
            mainAxisAlignment: MainAxisAlignment.center,
            children: <Widget>[
              Text(
                'Is FFmpeg loaded $isLoaded and selected $selectedFile',
                style: Theme.of(context).textTheme.headlineMedium,
              ),
              OutlinedButton(
                onPressed: loadFFmpeg,
                child: const Text('Load FFmpeg'),
              ),
              OutlinedButton(
                onPressed: isLoaded ? pickFile : null,
                child: const Text('Pick File'),
              ),
              OutlinedButton(
                onPressed: selectedFile == null ? null : extractFirstFrame,
                child: const Text('Extract First Frame'),
              ),
              OutlinedButton(
                onPressed: selectedFile == null ? null : createPreviewVideo,
                child: const Text('Create Preview Image'),
              ),
              Text('Conversion Status : $conversionStatus'),
              OutlinedButton(
                onPressed: selectedFile == null ? null : create720PQualityVideo,
                child: const Text('Create 720P Quality Videos'),
              ),
              OutlinedButton(
                onPressed: selectedFile == null ? null : create480PQualityVideo,
                child: const Text('Create 480P Quality Videos'),
              ),
              const SizedBox(height: 8),
              ValueListenableBuilder(
                valueListenable: progress,
                builder: (context, value, child) {
                  return value == null
                      ? const SizedBox.shrink()
                      : Row(
                          mainAxisAlignment: MainAxisAlignment.center,
                          children: [
                            Text('Exporting ${(value * 100).ceil()}%'),
                            const SizedBox(width: 6),
                            const CircularProgressIndicator(),
                          ],
                        );
                },
              ),
              const SizedBox(height: 8),
              ValueListenableBuilder(
                valueListenable: statistics,
                builder: (context, value, child) {
                  return value == null
                      ? const SizedBox.shrink()
                      : Row(
                          mainAxisAlignment: MainAxisAlignment.center,
                          children: [
                            Text(value),
                            const SizedBox(width: 6),
                            const CircularProgressIndicator(),
                          ],
                        );
                },
              ),
              Image.network(
                "https://images.pexels.com/photos/276267/pexels-photo-276267.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=2",
                height: 96,
              ),
              const SizedBox(height: 8),
              Container(
                height: 100,
                width: 500,
                margin: const EdgeInsets.symmetric(horizontal: 8),
                child: FutureBuilder(
                  future: dashFramesFuture,
                  builder: (ctx, snapshot) {
                    if (snapshot.connectionState == ConnectionState.done) {
                      return ListView.separated(
                        scrollDirection: Axis.horizontal,
                        itemCount: snapshot.data!.length,
                        physics: const AlwaysScrollableScrollPhysics(),
                        padding: const EdgeInsets.all(16),
                        itemBuilder: (ctx, index) {
                          return Container(
                            height: 100,
                            decoration: const BoxDecoration(borderRadius: BorderRadius.all(Radius.circular(16))),
                            child: Image.memory(snapshot.data![index]),
                          );
                        },
                        separatorBuilder: (ctx, index) {
                          return const SizedBox(width: 8);
                        },
                      );
                    } else {
                      return const Center(child: CircularProgressIndicator());
                    }
                  },
                ),
              ),
              const SizedBox(height: 8),
              ElevatedButton(onPressed: isLoaded ? () => createGif() : null, child: const Text('Create Gif from frames'))
            ],
          ),
        ),
      ),
      floatingActionButton: FloatingActionButton(
        onPressed: checkLoaded,
        tooltip: 'Refresh',
        child: const Icon(Icons.refresh),
      ),
    );
  }

  @override
  void dispose() {
    progress.dispose();
    statistics.dispose();
    super.dispose();
  }

  Future<void> loadFFmpeg() async {
    ffmpeg = createFFmpeg(
      CreateFFmpegParam(
        log: true,
        corePath: "https://unpkg.com/@ffmpeg/core@0.11.0/dist/ffmpeg-core.js",
      ),
    );

    ffmpeg.setProgress(_onProgressHandler);
    ffmpeg.setLogger(_onLogHandler);

    await ffmpeg.load();

    checkLoaded();
  }

  void checkLoaded() {
    setState(() {
      isLoaded = ffmpeg.isLoaded();
    });
  }

  Future<void> pickFile() async {
    filePickerResult =
        await FilePicker.platform.pickFiles(type: FileType.video);

    if (filePickerResult != null &&
        filePickerResult!.files.single.bytes != null) {
      /// Writes File to memory
      ffmpeg.writeFile('input.mp4', filePickerResult!.files.single.bytes!);

      setState(() {
        selectedFile = filePickerResult!.files.single.name;
      });
    }
  }

  /// Extracts First Frame from video
  Future<void> extractFirstFrame() async {
    await ffmpeg.run([
      '-i',
      'input.mp4',
      '-vf',
      "select='eq(n,0)'",
      '-vsync',
      '0',
      'frame1.webp'
    ]);
    final data = ffmpeg.readFile('frame1.webp');
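    // NOTE: 'webSaveAs' is assumed to be a small JavaScript helper defined in
    // index.html (e.g. wrapping FileSaver.js) that saves a Blob under the given
    // file name; it is not part of the ffmpeg_wasm plugin itself.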
    js.context.callMethod('webSaveAs', [
      html.Blob([data]),
      'frame1.webp'
    ]);
  }

  /// Creates Preview Image of Video
  Future<void> createPreviewVideo() async {
    await ffmpeg.run([
      '-i',
      'input.mp4',
      '-t',
      '5.0',
      '-ss',
      '2.0',
      '-s',
      '480x720',
      '-f',
      'webp',
      '-r',
      '5',
      'previewWebp.webp'
    ]);
    final previewWebpData = ffmpeg.readFile('previewWebp.webp');
    js.context.callMethod('webSaveAs', [
      html.Blob([previewWebpData]),
      'previewWebp.webp'
    ]);
  }

  Future<void> create720PQualityVideo() async {
    setState(() {
      conversionStatus = 'Started';
    });
    await ffmpeg.run([
      '-i',
      'input.mp4',
      '-s',
      '720x1280',
      '-c:a',
      'copy',
      '720P_output.mp4'
    ]);
    setState(() {
      conversionStatus = 'Saving';
    });
    final hqVideo = ffmpeg.readFile('720P_output.mp4');
    setState(() {
      conversionStatus = 'Downloading';
    });
    js.context.callMethod('webSaveAs', [
      html.Blob([hqVideo]),
      '720P_output.mp4'
    ]);
    setState(() {
      conversionStatus = 'Completed';
    });
  }

  Future<void> create480PQualityVideo() async {
    setState(() {
      conversionStatus = 'Started';
    });
    await ffmpeg.run([
      '-i',
      'input.mp4',
      '-s',
      '480x720',
      '-c:a',
      'copy',
      '480P_output.mp4'
    ]);
    setState(() {
      conversionStatus = 'Saving';
    });
    final hqVideo = ffmpeg.readFile('480P_output.mp4');
    setState(() {
      conversionStatus = 'Downloading';
    });
    js.context.callMethod('webSaveAs', [
      html.Blob([hqVideo]),
      '480P_output.mp4'
    ]);
    setState(() {
      conversionStatus = 'Completed';
    });
  }

  /// It will create gif from png frames
  Future<void> createGif() async {
    for (int i = 0; i <= 43; i++) {
      final ByteData data = await rootBundle.load(i < 10 ? 'flutter_dash_frames/flutter_dash_00$i.png' : 'flutter_dash_frames/flutter_dash_0$i.png');
      final file = data.buffer.asUint8List();
      ffmpeg.writeFile(i < 10 ? 'flutter_dash_00$i.png' : 'flutter_dash_0$i.png', file);
    }

    await ffmpeg.run([
      '-framerate',
      '30',
      '-i',
      'flutter_dash_%03d.png',
      '-vf',
      'palettegen',
      'palette.png',
    ]);
    await ffmpeg.run([
      '-framerate',
      '30',
      '-i',
      'flutter_dash_%03d.png',
      '-i',
      'palette.png',
      '-filter_complex',
      '[0:v][1:v]paletteuse',
      'flutter_dash.gif',
    ]);
    final previewWebpData = ffmpeg.readFile('flutter_dash.gif');
    js.context.callMethod('webSaveAs', [
      html.Blob([previewWebpData]),
      'flutter_dash.gif'
    ]);
  }

  ///It will generate List of frames to show in ui
  Future<List<Uint8List>> _genDashFrames() async {
    List<Uint8List> frames = [];
    for (int i = 0; i <= 43; i++) {
      final ByteData data = await rootBundle.load(i < 10 ? 'flutter_dash_frames/flutter_dash_00$i.png' : 'flutter_dash_frames/flutter_dash_0$i.png');
      final image = data.buffer.asUint8List();
      frames.add(image);
    }
    return frames;
  }

  void _onProgressHandler(ProgressParam progress) {
    final isDone = progress.ratio >= 1;

    this.progress.value = isDone ? null : progress.ratio;
    if (isDone) {
      statistics.value = null;
    }
  }

  static final regex = RegExp(
    r'frame\s*=\s*(\d+)\s+fps\s*=\s*(\d+(?:\.\d+)?)\s+q\s*=\s*([\d.-]+)\s+L?size\s*=\s*(\d+)\w*\s+time\s*=\s*([\d:\.]+)\s+bitrate\s*=\s*([\d.]+)\s*(\w+)/s\s+speed\s*=\s*([\d.]+)x',
  );

  void _onLogHandler(LoggerParam logger) {
    if (logger.type == 'fferr') {
      final match = regex.firstMatch(logger.message);

      if (match != null) {
        // indicates the number of frames that have been processed so far.
        final frame = match.group(1);
        // is the current frame rate
        final fps = match.group(2);
        // stands for quality 0.0 indicating lossless compression, other values indicating that there is some lossy compression happening
        final q = match.group(3);
        // indicates the size of the output file so far
        final size = match.group(4);
        // is the time that has elapsed since the beginning of the conversion
        final time = match.group(5);
        // is the current output bitrate
        final bitrate = match.group(6);
        // for instance: 'kbits/s'
        final bitrateUnit = match.group(7);
        // is the speed at which the conversion is happening, relative to real-time
        final speed = match.group(8);

        statistics.value =
            'frame: $frame, fps: $fps, q: $q, size: $size, time: $time, bitrate: $bitrate$bitrateUnit, speed: $speed';
      }
    }
  }
}

This example shows how to load FFmpeg in a Flutter Web app, pick a video file, and perform various processing tasks such as extracting the first frame, creating a preview image, generating videos at different resolutions, and building a GIF animation from image frames.


More hands-on tutorials on using the ffmpeg_wasm video processing plugin in Flutter are available at https://www.itying.com/category-92-b0.html

1 Reply


Sure, here is a code example of how to use the ffmpeg_wasm plugin for video processing in a Flutter project. ffmpeg_wasm is a WebAssembly-based FFmpeg library that runs on Flutter's Web platform and can be used to process video and audio.

Step 1: Add the dependency

First, add the ffmpeg_wasm dependency to your pubspec.yaml:

dependencies:
  flutter:
    sdk: flutter
  ffmpeg_wasm: ^0.10.0  # check pub.dev for the latest version

Then run flutter pub get to install it.

Step 2: Enable Web support

Make sure your Flutter project has Web support enabled. You can run the following command in the project root to enable it:

flutter config --enable-web

Step 3: Process video with ffmpeg_wasm

Below is a simple example showing how to transcode a video with ffmpeg_wasm (for example, converting a video to MP4).

1. Import the required packages

import 'dart:html' as html;
import 'dart:typed_data';

import 'package:ffmpeg_wasm/ffmpeg_wasm.dart';
import 'package:flutter/material.dart';

2. Create a video processing function

/// Transcodes the given video bytes to H.264/MP4 and returns the result.
Future<Uint8List> transcodeVideo(Uint8List inputBytes) async {
  // Create and load the FFmpeg instance (same API as shown in the post above).
  final FFmpeg ffmpeg = createFFmpeg(CreateFFmpegParam(log: true));
  if (!ffmpeg.isLoaded()) {
    await ffmpeg.load();
  }

  try {
    // Write the input into FFmpeg's in-memory file system.
    ffmpeg.writeFile('input.video', inputBytes);

    // Run the conversion; adjust the arguments as needed.
    await ffmpeg.run([
      '-i', 'input.video',
      '-c:v', 'libx264',
      '-pix_fmt', 'yuv420p',
      'output.mp4',
    ]);

    // Read the result back out of the in-memory file system.
    return ffmpeg.readFile('output.mp4');
  } finally {
    ffmpeg.exit();
  }
}

3. Create a Flutter UI to pick a file and trigger processing

void main() {
  runApp(const MyApp());
}

class MyApp extends StatelessWidget {
  const MyApp({super.key});

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(title: const Text('FFmpeg WASM Example')),
        body: Center(
          child: ElevatedButton(
            onPressed: () async {
              // Pick a video file in the browser.
              final input = html.FileUploadInputElement()..accept = 'video/*';
              input.click();
              await input.onChange.first;
              final file = input.files?.first;
              if (file == null) return;

              // Read the picked file into memory.
              final reader = html.FileReader()..readAsArrayBuffer(file);
              await reader.onLoad.first;
              final bytes = reader.result as Uint8List;

              // Transcode and offer the result as a download.
              final output = await transcodeVideo(bytes);
              final blob = html.Blob([output], 'video/mp4');
              final url = html.Url.createObjectUrlFromBlob(blob);
              html.AnchorElement(href: url)
                ..download = 'output.mp4'
                ..click();
              html.Url.revokeObjectUrl(url);
            },
            child: const Text('Select Video and Transcode'),
          ),
        ),
      ),
    );
  }
}

Notes

  1. File storage: On the Web, direct access to the local file system is restricted, so handling the output file needs special care. In a real application you may need to upload the processed data to a server or store it some other way (see the sketch after this list).

  2. Headers: the multi-threaded FFmpeg core relies on SharedArrayBuffer, so the page must be served with the Cross-Origin-Opener-Policy and Cross-Origin-Embedder-Policy headers described above (or use the single-threaded core instead).

  3. Compatibility: ffmpeg_wasm depends on WebAssembly, so it only works in browsers that support WebAssembly.

  4. Performance: FFmpeg is a powerful tool, and video processing can require a long time and a lot of compute; on the Web, performance may be limited.
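
As a minimal sketch for note 1, the converted bytes could be uploaded to a backend with the http package; the endpoint URL and form field name below are hypothetical placeholders:

import 'dart:typed_data';
import 'package:http/http.dart' as http;

// Uploads processed video bytes to a (hypothetical) backend endpoint.
Future<void> uploadResult(Uint8List bytes) async {
  final request = http.MultipartRequest(
    'POST',
    Uri.parse('https://example.com/upload'), // placeholder URL
  )..files.add(http.MultipartFile.fromBytes(
      'file', // placeholder form field name
      bytes,
      filename: 'output.mp4',
    ));

  final response = await request.send();
  print('Upload finished with status ${response.statusCode}');
}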

This example provides a basic skeleton that you can extend and modify as needed.
