HarmonyOS NEXT: WAV audio recorded with audio.AudioCapturer cannot be played

Posted 1 week ago by h691938207 · last edited 5 days ago · from HarmonyOS


I implemented audio recording following the demo in the AudioCapturer development guide (https://developer.huawei.com/consumer/cn/doc/harmonyos-guides-V5/using-audiocapturer-for-recording-V5), but the recorded file cannot be played, and no audio encoding or channel information can be read from it. I also tried adding a WAV header to the recorded file myself, but it still cannot be played. The code is as follows:

import { IAudioRecorder, IAudioRecorderConfig } from '@ohos/router';
import { KSAudioStreamRecogniser } from '../audio2Text/KSAudioStreamRecogniser';
import { audio } from '@kit.AudioKit';
import { fileIo, WriteOptions } from '@kit.CoreFileKit';
import { BusinessError } from '@kit.BasicServicesKit';
import { hilog } from '@kit.PerformanceAnalysisKit';
import { KSPCMEncoder } from '../utils/KSPCMEncoder';

const KSAudioRecordUsingMP3 = false;

// Record PCM data with AudioCapturer, then transcode it to MP3 with AVTranscoder.

export class KSAudioRecorder implements IAudioRecorder {
  config: IAudioRecorderConfig = {
    maxDuration: 60,
  }
  // Output paths for the PCM, WAV and MP3 recordings
  pcmPath: string = `${getContext(this).tempDir}/${new Date().getTime()}.pcm`
  wavPath: string = `${getContext(this).tempDir}/${new Date().getTime()}.wav`
  mp3Path: string = `${getContext(this).tempDir}/${new Date().getTime()}.mp3`
  file?: fileIo.File;
  recogniser?: KSAudioStreamRecogniser
  audioVolumeManager?: audio.AudioVolumeManager;
  audioCapturer?: audio.AudioCapturer;
  sampleRate: number = audio.AudioSamplingRate.SAMPLE_RATE_48000;

  async startRecord() {
    try {
      await this.prepareRecogniser()
      await this.prepareRecorder()
      await this.recogniser?.start()
      await this.audioCapturer?.start()
    } catch (e) {
      hilog.error(0x0000, 'KSAudioRecorder', 'startRecord failed: %{public}s', JSON.stringify(e));
    }
  }

  private async prepareRecogniser() {
    this.recogniser = new KSAudioStreamRecogniser();
    this.recogniser.sampleRate = this.sampleRate
    this.recogniser.channel = 2
    this.recogniser.speechRecognised = this.config.speechRecognised
  }

  private async prepareRecorder() {
    let bufferSize: number = 0;
    let audioStreamInfo: audio.AudioStreamInfo = {
      samplingRate: this.sampleRate, // sampling rate
      channels: audio.AudioChannel.CHANNEL_2, // channel count
      sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, // sample format
      encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW // encoding type
    }
    let audioCapturerInfo: audio.AudioCapturerInfo = {
      source: audio.SourceType.SOURCE_TYPE_MIC, // audio source type
      capturerFlags: 0 // audio capturer flags
    }
    let audioCapturerOptions: audio.AudioCapturerOptions =
      { streamInfo: audioStreamInfo, capturerInfo: audioCapturerInfo }
    this.file = fileIo.openSync(this.pcmPath, fileIo.OpenMode.READ_WRITE |
      fileIo.OpenMode.CREATE);
    // Create the capturer
    this.audioCapturer = await audio.createAudioCapturer(audioCapturerOptions)
    let audioManager = audio.getAudioManager();
    this.audioVolumeManager = audioManager.getVolumeManager();
    // Listen for volume changes
    this.audioVolumeManager.on('volumeChange', (volumeEvent: audio.VolumeEvent) => {
      if (this.config.volChanged) {
        this.config.volChanged(volumeEvent.volume);
      }
    })
    // Receive the captured stream: feed it to speech recognition and append it to the PCM file
    this.audioCapturer.on('readData', (buffer: ArrayBuffer) => {
      let options: WriteOptions = { offset: bufferSize, length: buffer.byteLength }
      fileIo.writeSync(this.file?.fd, buffer, options);
      // Hand the buffer to ASR for speech-to-text
      this.recogniser?.writeArrayBuffer(buffer)
      bufferSize += buffer.byteLength;
    });
  }

  async stopRecord() {
    try {
      await this.audioCapturer?.stop()
      if (this.file) {
        await fileIo.close(this.file);
      }
      await this.audioCapturer?.release()
      if (KSAudioRecordUsingMP3) {
        this.transformCodeMP3();
      } else {
        this.transformCodeWAV()
      }
    } catch (e) {
      hilog.error(0x0000, 'KSAudioRecorder', 'stopRecord failed: %{public}s', JSON.stringify(e));
    }
  }

  // The PCM data has to be converted to WAV; alternatively the WAV header could be
  // written at the start of the file when recording begins.
  private transformCodeWAV() {
    try {
      let encoder = new KSPCMEncoder(this.sampleRate, 2)
      encoder.pcmToWav(this.pcmPath, this.wavPath)
    } catch (e) {
      hilog.error(0x0000, 'KSAudioRecorder', 'pcm to wav failed: %{public}s', JSON.stringify(e));
    }
    if (this.config.complete) {
      this.config.complete(this.wavPath)
    }
  }

  // PCM to MP3: no approach for this yet, to be tried later.
  private transformCodeMP3() {
  }
}


2 replies

Below is a demo of recording with AudioCapturer:

import audio from '@ohos.multimedia.audio'
import fs from '@ohos.file.fs';
import { abilityAccessCtrl, common, PermissionRequestResult, Permissions } from '@kit.AbilityKit';
import { BusinessError } from '@kit.BasicServicesKit';

@Entry
@Component
export struct AudioCapturePage {
  textTimerController: TextTimerController = new TextTimerController()
  @State format: string = 'mm:ss.SS';
  @State recordFlag: boolean = true;
  @State fileNames: string[] = [];
  private context = getContext(this) as common.UIAbilityContext;
  private filesDir: string = this.context.filesDir; // /data/storage/el2/base/haps/entry/files
  private curFile: fs.File | undefined = undefined;
  private audioCapturer: audio.AudioCapturer | undefined = undefined;
  private audioStreamInfo: audio.AudioStreamInfo = {
    samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, // sampling rate
    channels: audio.AudioChannel.CHANNEL_2, // channel count
    sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, // sample format
    encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW, // encoding type
    // channelLayout: audio.AudioChannelLayout.CH_LAYOUT_4POINT0, // channel layout
  }
  private audioCapturerInfo: audio.AudioCapturerInfo = {
    source: audio.SourceType.SOURCE_TYPE_MIC, // audio source type
    capturerFlags: 0 // audio capturer flags
  }
  private audioCapturerOptions: audio.AudioCapturerOptions = {
    streamInfo: this.audioStreamInfo, // audio stream information
    capturerInfo: this.audioCapturerInfo // audio capturer information
  }

  build() {
    NavDestination() {
      Column({ space: 20 }) {
        Column() {
          Text('录音').fontSize(24).fontWeight(500).fontColor(Color.Gray)
          TextTimer({ isCountDown: false, count: 30000, controller: this.textTimerController })
            .format(this.format)
            .fontColor(Color.Black)
            .fontSize(50)
            .onTimer((utc: number, elapsedTime: number) => {
              console.info('AudioCaptureDemo textTimer notCountDown utc is:' + utc + ', elapsedTime: ' + elapsedTime)
            })
          Row() {
            Image(this.recordFlag ? $r('app.media.startIcon') : $r('app.media.ic_camera_story_playing'))
              .width(50)
              .height(50)
              .onClick(() => {
                if (this.recordFlag) {
                  this.startRecordingProcess()
                } else {
                  this.stopRecordingProcess()
                }
              })
          }
          .width('100%')
          .justifyContent(FlexAlign.SpaceAround)
        }
        .width('95%')
        .padding(20)
        .borderRadius(20)
        .backgroundColor(Color.White)

        Column({ space: 5 }) {
          Button('文件列表').onClick(() => {
            this.getFileList()
          })
          Scroll() {
            List({ space: 5 }) {
              ForEach(this.fileNames, (item: string, index: number) => {
                ListItem() {
                  Text(item).backgroundColor('#f1f3f5').padding({ left: 20, right: 20, top: 8, bottom: 8 })
                    .width('100%').borderRadius(15)
                }
              })
            }
          }
          .scrollBar(BarState.Off)
        }
        .width('95%')
        .height('50%')
        .padding(20)
        .borderRadius(20)
        .backgroundColor(Color.White)
      }
      .width('100%')
      .height('100%')
      .backgroundColor($r('app.color.start_window_background'))
    }
  }

  aboutToAppear(): void {
    let permissions: Array<Permissions> = ['ohos.permission.MICROPHONE', 'ohos.permission.READ_MEDIA', 'ohos.permission.WRITE_MEDIA'];
    let atManager: abilityAccessCtrl.AtManager = abilityAccessCtrl.createAtManager();
    // requestPermissionsFromUser checks the current grant state and decides whether to show the dialog
    atManager.requestPermissionsFromUser(this.context, permissions).then((data: PermissionRequestResult) => {
      let grantStatus: Array<number> = data.authResults;
      let length: number = grantStatus.length;
      for (let i = 0; i < length; i++) {
        if (grantStatus[i] != 0) {
          // The user denied the permission; explain that it is required and guide them to grant it in system settings
          return;
        }
      }
      this.createAudioCapturer();
    }).catch((err: BusinessError) => {
      console.info('Failed to request permissions from user.');
    })
  }

  aboutToDisappear(): void {
    if (this.audioCapturer) {
      (this.audioCapturer as audio.AudioCapturer).release((err: BusinessError) => {
        if (err) {
          console.error('Capturer release failed.');
        } else {
          console.info('Capturer release success.');
        }
      });
    }
  }

  getFile(): fs.File {
    let fileName: string = this.filesDir + '/Audio_' + Date.parse(new Date().toString()) + ".wav";
    console.info(`AudioCaptureDemo file is ${fileName}`)
    let file: fs.File = fs.openSync(fileName, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE)
    return file;
  }

  // Flow for starting a recording
  async startRecordingProcess() {
    try {
      if (this.audioCapturer === undefined) {
        console.error('AudioCaptureDemo audioCapturer is undefined.')
        await this.createAudioCapturer();
      }
      if (this.audioCapturer !== undefined) {
        await this.audioCapturer.start();
        this.textTimerController.start();
        this.curFile = this.getFile();
        let numBuffersToCapture = 150; // write 150 buffers in a loop
        let count = 0;
        while (numBuffersToCapture) {
          let bufferSize = await this.audioCapturer.getBufferSize();
          let buffer = await this.audioCapturer.read(bufferSize, true);
          if (buffer === undefined) {
            console.error('AudioCaptureDemo read buffer failed.')
          } else {
            let number = fs.writeSync(this.curFile.fd, buffer, { offset: count * bufferSize, length: bufferSize });
            console.info(`AudioCaptureDemo write data: ${number}`)
          }
          numBuffersToCapture--;
          count++;
        }
        this.recordFlag = false;
      }
    } catch (err) {
      console.error('AudioCaptureDemo startRecordingProcess ' + JSON.stringify(err))
    }
  }

  // Flow for stopping a recording
  async stopRecordingProcess() {
    if (this.audioCapturer === undefined) {
      console.error('AudioCaptureDemo audioCapturer is undefined.')
      return;
    }
    if (this.audioCapturer.state.valueOf() !== audio.AudioState.STATE_RUNNING && this.audioCapturer.state.valueOf() !== audio.AudioState.STATE_PAUSED) {
      console.error('AudioCaptureDemo audioCapturer state is not STATE_RUNNING or STATE_PAUSED.')
      return;
    }
    await this.audioCapturer.stop();
    if (this.audioCapturer.state.valueOf() !== audio.AudioState.STATE_STOPPED) {
      console.error('AudioCaptureDemo audioCapturer stop failed.')
    } else {
      console.info('AudioCaptureDemo audioCapturer stop success.')
    }
    this.textTimerController.reset();
    fs.closeSync(this.curFile)
    this.recordFlag = true;
    this.getFileList()
  }

  getFileList() {
    this.fileNames = fs.listFileSync(this.filesDir, { filter: { suffix: [".wav"] } });
    console.info('fileNames = ' + this.fileNames);
  }

  async createAudioCapturer() {
    try {
      this.audioCapturer = await audio.createAudioCapturer(this.audioCapturerOptions);
      this.setAudioCapturerCallBack(this.audioCapturer);
    } catch (error) {
      let err = error as BusinessError;
      console.error(`AudioCaptureDemo AudioCapturer Created : ERROR : ${JSON.stringify(err)} stack: ${err.stack}`);
    }
  }

  setAudioCapturerCallBack(audioCapturer: audio.AudioCapturer) {
    audioCapturer.on('stateChange', (state: audio.AudioState) => {
      switch (state) {
        case audio.AudioState.STATE_NEW: // newly created
          console.info('AudioCaptureDemo audio capturer state is: STATE_NEW')
          break;
        case audio.AudioState.STATE_PREPARED: // prepared
          console.info('AudioCaptureDemo audio capturer state is: STATE_PREPARED')
          break;
        case audio.AudioState.STATE_RUNNING: // running
          console.info('AudioCaptureDemo audio capturer state is: STATE_RUNNING')
          break;
        case audio.AudioState.STATE_STOPPED: // stopped
          console.info('AudioCaptureDemo audio capturer state is: STATE_STOPPED')
          break;
        case audio.AudioState.STATE_RELEASED: // released
          console.info('AudioCaptureDemo audio capturer state is: STATE_RELEASED')
          break;
        case audio.AudioState.STATE_PAUSED: // paused
          console.info('AudioCaptureDemo audio capturer state is: STATE_PAUSED')
          break;
        case audio.AudioState.STATE_INVALID: // invalid
          console.error('AudioCaptureDemo audio capturer state is: STATE_INVALID')
          break;
        default:
          console.error('AudioCaptureDemo audio capturer state is: unknown')
          break;
      }
    });
  }
}



Here are some possible solutions for WAV audio recorded with audio.AudioCapturer on HarmonyOS NEXT that cannot be played:

  1. Confirm the file format: AudioCapturer produces a raw PCM stream, whereas a WAV file is PCM data preceded by a file header. Saving the raw stream with a .wav extension but without a correct header makes it unplayable. Convert the PCM data to WAV with a tool, or write the WAV header yourself (see the sketch after this list).
  2. Check permissions: make sure the app has requested and been granted the microphone permission and any file read/write permissions it needs; missing permissions can also cause recording or playback to fail.
  3. Check the file system: confirm that the file system or storage device is working properly. Try writing the file to a different storage location or file system to see whether the problem persists.
  4. Check the recording configuration: verify the AudioCapturer settings, including sampling rate, channel count, and sample format. The WAV header must describe exactly these values, otherwise players cannot decode the data correctly.
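
For point 1, here is a minimal sketch of what writing the WAV header yourself could look like. It is an illustration rather than the original KSPCMEncoder implementation: buildWavHeader and pcmToWav are hypothetical helper names, and the example assumes the capture parameters from the question (48000 Hz, 2 channels, 16-bit little-endian PCM). The key point is that the header fields must match both the actual capture configuration and the real PCM byte length.

// Minimal sketch (hypothetical helpers): prepend a standard 44-byte RIFF/WAVE
// header to a raw PCM file recorded by AudioCapturer, assuming 16-bit PCM.
import { fileIo } from '@kit.CoreFileKit';

function buildWavHeader(pcmByteLength: number, sampleRate: number,
  channels: number, bitsPerSample: number): ArrayBuffer {
  const header = new ArrayBuffer(44);
  const view = new DataView(header);
  const writeString = (offset: number, text: string) => {
    for (let i = 0; i < text.length; i++) {
      view.setUint8(offset + i, text.charCodeAt(i));
    }
  };
  const byteRate = sampleRate * channels * bitsPerSample / 8;
  const blockAlign = channels * bitsPerSample / 8;
  writeString(0, 'RIFF');
  view.setUint32(4, 36 + pcmByteLength, true); // RIFF chunk size = 36 + data size
  writeString(8, 'WAVE');
  writeString(12, 'fmt ');
  view.setUint32(16, 16, true);        // fmt chunk size (16 for PCM)
  view.setUint16(20, 1, true);         // audio format: 1 = uncompressed PCM
  view.setUint16(22, channels, true);  // channel count
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, byteRate, true);
  view.setUint16(32, blockAlign, true);
  view.setUint16(34, bitsPerSample, true);
  writeString(36, 'data');
  view.setUint32(40, pcmByteLength, true); // data chunk size = raw PCM byte length
  return header;
}

function pcmToWav(pcmPath: string, wavPath: string, sampleRate: number, channels: number): void {
  const pcmSize = fileIo.statSync(pcmPath).size;
  const pcmFile = fileIo.openSync(pcmPath, fileIo.OpenMode.READ_ONLY);
  const wavFile = fileIo.openSync(wavPath,
    fileIo.OpenMode.READ_WRITE | fileIo.OpenMode.CREATE | fileIo.OpenMode.TRUNC);
  // Write the header first, then copy the PCM data behind it in chunks.
  fileIo.writeSync(wavFile.fd, buildWavHeader(pcmSize, sampleRate, channels, 16),
    { offset: 0, length: 44 });
  const chunk = new ArrayBuffer(64 * 1024);
  let readOffset = 0;
  let writeOffset = 44;
  let readLen = fileIo.readSync(pcmFile.fd, chunk, { offset: readOffset });
  while (readLen > 0) {
    fileIo.writeSync(wavFile.fd, chunk, { offset: writeOffset, length: readLen });
    readOffset += readLen;
    writeOffset += readLen;
    readLen = fileIo.readSync(pcmFile.fd, chunk, { offset: readOffset });
  }
  fileIo.closeSync(pcmFile);
  fileIo.closeSync(wavFile);
}

// Usage against the recording from the question (sampleRate 48000, 2 channels):
// pcmToWav(this.pcmPath, this.wavPath, 48000, 2);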

If none of the above resolves the issue, it is recommended to contact official support for more specialized technical help.
