Why can't the preview stream be saved as an image file in HarmonyOS鸿蒙Next?

4 Replies

Reference example:
```typescript
import camera from '@ohos.multimedia.camera';
import image from '@ohos.multimedia.image';
import abilityAccessCtrl from '@ohos.abilityAccessCtrl';
import common from '@ohos.app.ability.common';
import fs from '@ohos.file.fs';
import { BusinessError } from '@ohos.base';
@Entry
@Component
struct Index {
  @State message: string = 'Hello World';
  private mXComponentController: XComponentController = new XComponentController();
  private surfaceId: string = '-1';
  @State imgUrl: PixelMap | undefined = undefined;
  private context: ESObject = undefined;
  private previewProfilesObj2: camera.Profile | undefined = undefined;
  private receiver: image.ImageReceiver | undefined = undefined;
  aboutToAppear() {
    let context = getContext() as common.UIAbilityContext;
    abilityAccessCtrl.createAtManager().requestPermissionsFromUser(context, ['ohos.permission.CAMERA']).then(() => {
      this.mXComponentController.setXComponentSurfaceSize({ surfaceWidth: 1920, surfaceHeight: 1080 });
      this.surfaceId = this.mXComponentController.getXComponentSurfaceId();
      console.info(`surfaceId=${this.surfaceId}`);
      this.createDualChannelPreview(this.surfaceId);
    });
  }
  async createDualChannelPreview(XComponentSurfaceId: string): Promise<void> {
    let cameraManager = await camera.getCameraManager(getContext() as ESObject);
    let camerasDevices: Array<camera.CameraDevice> = cameraManager.getSupportedCameras(); // get the supported camera devices
    let sceneModes: Array<camera.SceneMode> = cameraManager.getSupportedSceneModes(camerasDevices[0]);
    let isSupportPhotoMode: boolean = sceneModes.indexOf(camera.SceneMode.NORMAL_PHOTO) >= 0;
    if (!isSupportPhotoMode) {
      console.error('photo mode not support');
      return;
    }
    let profiles: camera.CameraOutputCapability = cameraManager.getSupportedOutputCapability(camerasDevices[0]); // get the output capability (profiles) of this camera device
    let previewProfiles: Array<camera.Profile> = profiles.previewProfiles;
    let previewProfilesObj: camera.Profile = previewProfiles[0];
    this.previewProfilesObj2 = previewProfiles[0];
    let size: image.Size = {
      // resolution of the saved image (kept identical to the preview profile)
      height: this.previewProfilesObj2.size.height,
      width: this.previewProfilesObj2.size.width
    };
    // Second preview channel: frames are delivered to this ImageReceiver (capacity 8) for saving.
    this.receiver = image.createImageReceiver(size, image.ImageFormat.JPEG, 8);
    let previewOutput: camera.PreviewOutput = cameraManager.createPreviewOutput(previewProfilesObj, XComponentSurfaceId);
    let imageReceiverSurfaceId: string = await this.receiver.getReceivingSurfaceId();
    let previewOutput2: camera.PreviewOutput = cameraManager.createPreviewOutput(this.previewProfilesObj2, imageReceiverSurfaceId);
    let cameraInput: camera.CameraInput = cameraManager.createCameraInput(camerasDevices[0]);
    await cameraInput.open();
    let captureSession: camera.CaptureSession = cameraManager.createCaptureSession();
    captureSession.beginConfig();
    captureSession.addInput(cameraInput);
    captureSession.addOutput(previewOutput);
    captureSession.addOutput(previewOutput2);
    await captureSession.commitConfig();
    await captureSession.start();
    this.onImageArrival(this.receiver);
  }
  async onImageArrival(receiver: image.ImageReceiver): Promise<void> {
    receiver.on('imageArrival', () => {
      console.info("imageArrival callback");
      receiver.readNextImage((err, nextImage: image.Image) => {
        if (err || nextImage === undefined) {
          console.error("readNextImage failed");
          return;
        }
        nextImage.getComponent(image.ComponentType.JPEG, async (err, imgComponent: image.Component) => {
          if (err || imgComponent === undefined) {
            return;
          }
          // Dump the raw buffer (see saveImageToFile below: this alone does not yield a decodable JPEG).
          this.saveImageToFile(imgComponent.byteBuffer);
          if (imgComponent.byteBuffer) {
            let sourceOptions: image.SourceOptions = {
              sourceDensity: 120,
              sourcePixelFormat: image.PixelMapFormat.NV21, // the preview frames arrive as raw NV21 data
              sourceSize: {
                height: this.previewProfilesObj2!.size.height,
                width: this.previewProfilesObj2!.size.width
              },
            }
            let imageResource = image.createImageSource(imgComponent.byteBuffer, sourceOptions);
            let imagePackerApi = image.createImagePacker();
            let packOpts: image.PackingOption = { format: "image/jpeg", quality: 98 };
            const filePath: string = getContext().cacheDir + "/image.jpg";
            let file = fs.openSync(filePath, fs.OpenMode.CREATE | fs.OpenMode.READ_WRITE);
            imagePackerApi.packToFile(imageResource, file.fd, packOpts).then(() => {
              console.info('pack success: ' + filePath);
            }).catch((error: BusinessError) => {
              console.error('Failed to pack the image. And the error is: ' + error);
            })
            imageResource.createPixelMap({}).then((res)=>{
              this.imgUrl = res;
            });
          } else {
            return;
          }
          nextImage.release();
        })
      })
    })
  }
  // Writes the raw preview buffer to a file. The buffer is unencoded pixel data
  // (NV21 in this example), not a JPEG, so the resulting "test.jpg" is not directly
  // viewable; the ImageSource + ImagePacker path above produces the actual JPEG.
  saveImageToFile(data: ArrayBuffer) {
    const context = getContext(this);
    let filePath = context.tempDir + "/test.jpg";
    console.info("path is " + filePath);
    let file = fs.openSync(filePath, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE);
    fs.write(file.fd, data, (err, writeLen) => {
      if (err) {
        console.error("write failed with error message: " + err.message + ", error code: " + err.code);
      } else {
        console.info("write data to file succeed and size is: " + writeLen);
      }
      fs.closeSync(file);
    });
  }
  build() {
    Column() {
      XComponent({
        id: '',
        type: 'surface',
        libraryname: '',
        controller: this.mXComponentController
      })
        .onLoad(() => {
          this.mXComponentController.setXComponentSurfaceSize({ surfaceWidth: 1920, surfaceHeight: 1080 });
          this.surfaceId = this.mXComponentController.getXComponentSurfaceId();
          this.createDualChannelPreview(this.surfaceId);
        })
        .width('1920px')
        .height('1080px')
      Row() {
        Image(this.imgUrl).objectFit(ImageFit.None)
      }.width('100%').height('50%').backgroundColor('#F0F0F0')
    }
  }
}
```
Note: to fill the screen without distortion, use the phone's aspect ratio: compare the screen's height/width with each supported preview size's width/height and pick the closest match. For example, on the Mate 60 Pro the 2592 x 1200 preview size differs from the screen ratio by only about 0.002, which makes it the best fit.
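As a sketch of that ratio-matching step (assuming the previewProfiles array returned by getSupportedOutputCapability as in the example above, and using @ohos.display to read the screen size; pickBestPreviewProfile is an illustrative helper name, not an API):
```typescript
import display from '@ohos.display';
import camera from '@ohos.multimedia.camera';

// Pick the preview profile whose width/height ratio is closest to the
// screen's height/width ratio (portrait screen vs. landscape sensor output).
function pickBestPreviewProfile(previewProfiles: Array<camera.Profile>): camera.Profile {
  let screen = display.getDefaultDisplaySync();
  let targetRatio = screen.height / screen.width;
  let best = previewProfiles[0];
  let bestDiff = Number.MAX_VALUE;
  for (let profile of previewProfiles) {
    let diff = Math.abs(profile.size.width / profile.size.height - targetRatio);
    if (diff < bestDiff) {
      bestDiff = diff;
      best = profile;
    }
  }
  return best;
}
```
Its result could then replace previewProfiles[0] when choosing previewProfilesObj in the example.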
If you manually call rotate(-90) in the code, the picture you get will not match the angle the camera actually sees. This callback refreshes frames in real time and rotate() is asynchronous, so a new frame can overwrite the previous one before its rotation finishes, which shows up as jitter. The fix is to assign the url inside the callback, and rotation alone is not enough: the image also has to be flipped:
```javascript
res.rotate(90).then(() => {
    res.flip(false, true).then(() => {
        this.imgUrl = res;
    });
});
```
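For context, the same chain can be written with await in a small helper; this is only a sketch of where the correction sits relative to createPixelMap in the example above (correctPreviewFrame is an illustrative name):
```typescript
import image from '@ohos.multimedia.image';

// Rotate and flip one decoded preview frame before it is displayed.
// In the example above, the returned PixelMap would be assigned to this.imgUrl.
async function correctPreviewFrame(source: image.ImageSource): Promise<image.PixelMap> {
  let pixelMap: image.PixelMap = await source.createPixelMap({});
  await pixelMap.rotate(90);        // rotate() is async: wait for it to finish
  await pixelMap.flip(false, true); // then mirror vertically
  return pixelMap;                  // only now hand the frame to the Image component
}
```
Assigning this.imgUrl only after such a helper resolves guarantees each frame is shown with both transforms applied, which avoids the jitter described above.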
In HarmonyOS鸿蒙Next, the reason a preview stream cannot be saved directly as an image file is likely related to the system's security mechanisms and permission management. HarmonyOS places strong emphasis on data security and privacy protection: a preview stream involves real-time data processing, and saving it directly could carry security risks. In addition, the file-storage interfaces may impose specific restrictions on how preview streams are handled, so developers need to go through the system-provided APIs or a dedicated data-conversion flow to save frames. For the concrete implementation, refer to the relevant interfaces and permission notes in the HarmonyOS developer documentation.

In HarmonyOS鸿蒙Next, the preview stream cannot be written straight to an image file, most likely because the system design prioritizes data security and privacy protection. Preview streams often contain sensitive information, and saving them directly could violate privacy policies. The system also does not provide a single API for dumping the preview stream, so developers have to process the image data another way, for example with the image-processing APIs or a custom pipeline. It is recommended to consult the official documentation or community resources for a suitable solution.
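The "conversion flow" both replies refer to is what the full example above already does: decode the raw NV21 preview buffer through an ImageSource, then re-encode it with an ImagePacker. A condensed sketch (encodePreviewFrameToJpeg is an illustrative name; width and height must match the preview profile actually used):
```typescript
import image from '@ohos.multimedia.image';
import fs from '@ohos.file.fs';

// Turn one raw NV21 preview frame into a real JPEG file.
async function encodePreviewFrameToJpeg(buffer: ArrayBuffer, width: number, height: number,
  filePath: string): Promise<void> {
  let source: image.ImageSource = image.createImageSource(buffer, {
    sourceDensity: 120,
    sourcePixelFormat: image.PixelMapFormat.NV21, // raw preview data, not an encoded JPEG
    sourceSize: { width: width, height: height }
  });
  let packer: image.ImagePacker = image.createImagePacker();
  let file = fs.openSync(filePath, fs.OpenMode.CREATE | fs.OpenMode.READ_WRITE);
  try {
    // packToFile decodes the frame and re-encodes it as JPEG directly into the fd.
    await packer.packToFile(source, file.fd, { format: 'image/jpeg', quality: 98 });
  } finally {
    fs.closeSync(file);
    await packer.release();
    await source.release();
  }
}
```
This mirrors the packToFile branch inside onImageArrival above; skipping this decode/re-encode step and dumping the raw buffer into a .jpg is the most likely reason the saved file does not open as an image.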