Using the flutter_xfyun_ise Flutter Speech Recognition Plugin

flutter_xfyun_ise

xfyun ise

Usage

This plugin brings speech recognition to Flutter and scores spoken audio through the iFlytek speech evaluation (ISE) service. It ships with concrete platform implementations for both Android and iOS.

Getting started

This project is a starting point for a Flutter plugin package, like the plugin packages described in the official documentation. If you are new to Flutter, the official online documentation is a good place to start: it includes tutorials, samples, guidance on mobile development, and a full API reference.

Complete example

The complete example below shows how to use the flutter_xfyun_ise plugin to run a speech evaluation.

import 'package:flutter/material.dart';
import 'package:flutter_xfyun_ise/bean/ise_param.dart';
import 'package:flutter_xfyun_ise/flutter_xfyun_ise.dart';
import 'package:path_provider/path_provider.dart';

void main() {
  runApp(const MyApp());
}

class MyApp extends StatefulWidget {
  const MyApp({Key? key}) : super(key: key);

  @override
  State<MyApp> createState() => _MyAppState();
}

class _MyAppState extends State<MyApp> {
  /// Text to be evaluated (kept in Chinese, since language is set to zh_cn below)
  var content = "一座座雪峰插入云霄,峰顶银光闪闪,大大小小的湖泊,像颗颗宝石镶嵌在彩带般的沟谷中。";
  var _score = "No score yet";
  var _path = "";

  @override
  void initState() {
    super.initState();
    initPlatformState();
    setListener();
  }

  // Set up the result and error listeners
  void setListener() {
    FlutterXfyunIse.instance.setOnErrorListener((error) {
      print("****************************");
      print("错误信息:    ${error}");
      print("****************************");
    });
    FlutterXfyunIse.instance.setOnResultListener((score) {
      print("****************************");
      print("回调信息:    ${score["total_score"]}");
      print("回调信息:    ${score["phone_score"]}");
      Map map = score["phone_score"];
      map.forEach((key, value) {
        print("key:     $key    -----       value:       $value");
      });
      // setState(() {
      //   _score = score;
      // });
      print("****************************");
    });
  }

  Future<void> setIseParameter() async {
    var dir = await getTemporaryDirectory();
    _path = "${dir.path}/${111222}.wav";
    // Configure the evaluation parameters
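    // A few of the parameters below, as described in the iFlytek ISE SDK documentation:
    //  - language / category: evaluation language and question type (here a Chinese read_sentence task)
    //  - vadBos / vadEos: silence timeouts in milliseconds before speech begins and after it ends
    //  - speechTimeout: maximum recording length in milliseconds (-1 means no limit)
    //  - resultLevel: how detailed the returned evaluation result is
    //  - iseAudioPath: path where the recorded audio file is written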
    FlutterXfyunIse.instance.setParameter(
      param: IseParam()
        ..language = "zh_cn"
        ..category = "read_sentence"
        ..resultLevel = "complete"
        ..vadBos = "5000"
        ..vadEos = "1800"
        ..speechTimeout = "-1"
        ..subject = "ise"
        ..plev = "0"
        ..iseUnite = "1"
        ..rst = "entirety"
        ..extraAbility = "syll_phone_err_msg;pitch;multi_dimension"
        ..textEncoding = "utf-8"
        ..aue = "opus"
        ..audioFormat = "wav"
        ..iseAudioPath = "$_path",
    );
    print("setIseParameter  路径展示 :        ${_path}");
  }

  Future<void> initPlatformState() async {
    FlutterXfyunIse.instance.init(appid: "269d43b0");
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(title: const Text('iFlytek Speech Evaluation')),
        body: SingleChildScrollView(
          child: Column(
            children: [
              Text("上次评分:       $_score"),
              Container(width: 200, height: 10, color: Colors.green, margin: EdgeInsets.only(top: 20, bottom: 20)),
              Text("测试数据:       $content"),
              SizedBox(height: 100),
              InkWell(
                child: Container(
                  width: 100,
                  height: 50,
                  color: Colors.red,
                  alignment: Alignment.center,
                  child: Text("开始测评"),
                ),
                onTap: () {
                  setIseParameter();
                  FlutterXfyunIse.instance.start(content: content);
                },
              ),
              InkWell(
                child: Container(
                  width: 100,
                  height: 50,
                  color: Colors.red,
                  alignment: Alignment.center,
                  child: Text("停止测评"),
                ),
                onTap: () {
                  FlutterXfyunIse.instance.stop();
                },
              ),
              InkWell(
                child: Container(
                  width: 100,
                  height: 50,
                  color: Colors.red,
                  alignment: Alignment.center,
                  child: Text("取消测评"),
                ),
                onTap: () {
                  FlutterXfyunIse.instance.cancel();
                },
              ),
              InkWell(
                child: Container(
                  width: 100,
                  height: 50,
                  color: Colors.red,
                  alignment: Alignment.center,
                  child: Text("结果解析"),
                ),
                onTap: () {
                  // Parse the evaluation results
                  FlutterXfyunIse.instance.resultsParsing();
                },
              ),
              InkWell(
                child: Container(
                  width: 100,
                  height: 50,
                  color: Colors.red,
                  alignment: Alignment.center,
                  child: Text("播放录音音频"),
                ),
                onTap: () async {
                  // AudioPlayerManager.instance.play(_path);
                },
              ),

              InkWell(
                child: Container(
                  width: 100,
                  height: 50,
                  color: Colors.red,
                  alignment: Alignment.center,
                  child: Text("关闭activity"),
                ),
                onTap: () async {
                  FlutterXfyunIse.instance.closeFlutter();
                },
              ),
            ],
          ),
        ),
      ),
    );
  }
}
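To display the score in the UI instead of only printing it, the commented-out setState in setListener can be restored. The snippet below is a minimal sketch that assumes the callback map carries a "total_score" entry, as the prints in the example suggest; since _score is a String, the value is interpolated rather than assigned directly.

    FlutterXfyunIse.instance.setOnResultListener((score) {
      // "total_score" is assumed to be present, matching the prints above.
      setState(() {
        _score = "${score["total_score"]}";
      });
    });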

More hands-on tutorials on using the flutter_xfyun_ise Flutter speech recognition plugin are available at https://www.itying.com/category-92-b0.html

1 Reply



flutter_xfyun_ise is a plugin for adding speech recognition to Flutter apps, built on the speech recognition technology of the iFlytek Open Platform. With it, you can easily integrate speech recognition into your Flutter application.

1. Add the dependency

First, add the flutter_xfyun_ise dependency to your pubspec.yaml file:

dependencies:
  flutter:
    sdk: flutter
  flutter_xfyun_ise: ^1.0.0  # use the latest available version

Then run flutter pub get to fetch the dependency.

2. Configure the iFlytek Open Platform

Before using flutter_xfyun_ise, register on the iFlytek Open Platform and create an application to obtain an APPID and API_KEY:

  1. Visit the iFlytek Open Platform (https://www.xfyun.cn) and register an account.
  2. Create a new application and note its APPID and API_KEY; a small sketch for keeping these credentials out of your widget code follows this list.
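A minimal sketch, assuming you read the credentials from --dart-define at build time; the XfyunCredentials class, the file name, and the XFYUN_APP_ID / XFYUN_API_KEY define names are all hypothetical and not part of the plugin:

// lib/xfyun_credentials.dart -- hypothetical helper, not part of flutter_xfyun_ise.
// Reading the values via --dart-define keeps them out of source control.
class XfyunCredentials {
  static const String appId =
      String.fromEnvironment('XFYUN_APP_ID', defaultValue: 'your_app_id');
  static const String apiKey =
      String.fromEnvironment('XFYUN_API_KEY', defaultValue: 'your_api_key');
}

Pass the real values at run time, for example flutter run --dart-define=XFYUN_APP_ID=xxx --dart-define=XFYUN_API_KEY=yyy, and use them in the init call shown in the next step.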

3. Initialize the plugin

Initialize the flutter_xfyun_ise plugin in your Flutter application:

import 'package:flutter_xfyun_ise/flutter_xfyun_ise.dart';

void main() async {
  WidgetsFlutterBinding.ensureInitialized();
  
  // Initialize the plugin
  await FlutterXfyunIse.init(
    appId: 'your_app_id',  // replace with your APPID
    apiKey: 'your_api_key',  // replace with your API_KEY
  );

  runApp(MyApp());
}

4. Use speech recognition

Use the methods of the FlutterXfyunIse class to start and stop speech recognition.

import 'package:flutter/material.dart';
import 'package:flutter_xfyun_ise/flutter_xfyun_ise.dart';

class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: SpeechRecognitionPage(),
    );
  }
}

class SpeechRecognitionPage extends StatefulWidget {
  @override
  _SpeechRecognitionPageState createState() => _SpeechRecognitionPageState();
}

class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
  String _recognizedText = '';

  void _startRecognition() async {
    try {
      // Start speech recognition
      await FlutterXfyunIse.startListening(
        onResult: (result) {
          setState(() {
            _recognizedText = result;
          });
        },
        onError: (error) {
          print('Error: $error');
        },
      );
    } catch (e) {
      print('Exception: $e');
    }
  }

  void _stopRecognition() async {
    // Stop speech recognition
    await FlutterXfyunIse.stopListening();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text('Speech Recognition Example'),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: <Widget>[
            Text('Recognized text: $_recognizedText'),
            SizedBox(height: 20),
            ElevatedButton(
              onPressed: _startRecognition,
              child: Text('Start recognition'),
            ),
            ElevatedButton(
              onPressed: _stopRecognition,
              child: Text('Stop recognition'),
            ),
          ],
        ),
      ),
    );
  }
}

5. Handle permissions

Speech recognition requires microphone access on both Android and iOS, so add the corresponding permission declarations to AndroidManifest.xml and Info.plist.

Android:

<uses-permission android:name="android.permission.RECORD_AUDIO" />

iOS:

<key>NSMicrophoneUsageDescription</key>
<string>We need access to your microphone for speech recognition</string>
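
On Android 6.0 and above, RECORD_AUDIO is a runtime permission, so it must also be granted at run time, not just declared in the manifest. The sketch below uses the third-party permission_handler package (an assumption of this example, not part of flutter_xfyun_ise) to ask for microphone access before starting recognition:

import 'package:permission_handler/permission_handler.dart';

/// Ask for microphone access and report whether the user granted it.
Future<bool> ensureMicrophonePermission() async {
  final status = await Permission.microphone.request();
  return status.isGranted;
}

Add permission_handler to pubspec.yaml, call ensureMicrophonePermission() before _startRecognition(), and only start listening when it returns true.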