HarmonyOS 鸿蒙Next图片视频预览页DEMO

发布于 1周前 作者 yuanlaile 来自 鸿蒙OS

HarmonyOS 鸿蒙Next图片视频预览页DEMO

2 回复

https://developer.huawei.com/consumer/cn/doc/harmonyos-references-V5/ts-basic-components-xcomponent-V5

// 导入camera接口,接口中提供了相机相关的属性和方法
import camera from '@ohos.multimedia.camera';
import image from '@ohos.multimedia.image';
import abilityAccessCtrl from '@ohos.abilityAccessCtrl';
import common from '@ohos.app.ability.common';
import fs from '@ohos.file.fs';
import { BusinessError } from '@ohos.base';

@Entry @Component struct Index { @State message: string = ‘Hello World’ //XComponent组件为预览流提供的Surface,创建Surface,用于画面显示 private mXComponentController: XComponentController = new XComponentController; private surfaceId: string = ‘-1’; @State imgUrl: PixelMap | undefined = undefined; private context: Context | undefined = undefined private pathDir = getContext().filesDir;

aboutToAppear() {

//申请权限
let context = getContext() as common.UIAbilityContext;
abilityAccessCtrl.createAtManager().requestPermissionsFromUser(context, ['ohos.permission.MICROPHONE',
  'ohos.permission.CAMERA', 'ohos.permission.MEDIA_LOCATION', 'ohos.permission.WRITE_MEDIA']).then(() => {
});
console.info(`surfaceId=${this.surfaceId}`);

}

// 创建预览输出流获取的surfaceId。 async createDualChannelPreview(XComponentSurfaceId: string, receiver: image.ImageReceiver): Promise<void> { let cameraManager = camera.getCameraManager(getContext()); let camerasDevices: Array<camera.CameraDevice> = cameraManager.getSupportedCameras(); // 获取支持的相机设备对象

// 获取profile对象
let profiles: camera.CameraOutputCapability =
  cameraManager.getSupportedOutputCapability(camerasDevices[0]); // 获取对应相机设备profiles
let previewProfiles: Array<camera.Profile> = profiles.previewProfiles;

// 预览流1
let previewProfilesObj: camera.Profile = previewProfiles[0];

// 预览流2
let previewProfilesObj2: camera.Profile = previewProfiles[0];

// 创建 预览流1 输出对象
let previewOutput: camera.PreviewOutput =
  cameraManager.createPreviewOutput(previewProfilesObj, XComponentSurfaceId);
console.log("testtagemy-XComponentSurfaceId:" + JSON.stringify(XComponentSurfaceId))

// 创建 预览流2 输出对象
let imageReceiverSurfaceId: string = await receiver.getReceivingSurfaceId();
let previewOutput2: camera.PreviewOutput =
  cameraManager.createPreviewOutput(previewProfilesObj2, imageReceiverSurfaceId);
console.log("testtagemy-imageReceiverSurfaceId:" + JSON.stringify(imageReceiverSurfaceId))

// 创建cameraInput对象
let cameraInput: camera.CameraInput = cameraManager.createCameraInput(camerasDevices[0]); //0 -1

// 打开相机
await cameraInput.open();

// 会话流程
let captureSession: camera.CaptureSession = cameraManager.createCaptureSession();

// 开始配置会话
captureSession.beginConfig();

// 把CameraInput加入到会话
captureSession.addInput(cameraInput);

// 把 预览流1 加入到会话
captureSession.addOutput(previewOutput);

// 把 预览流2 加入到会话
captureSession.addOutput(previewOutput2);

// 提交配置信息
await captureSession.commitConfig();

// 会话开始
await captureSession.start(); //会话

}

// 通过Surface进行数据传递,通过ImageReceiver的surface获取预览图像。 async onImageArrival(receiver: image.ImageReceiver): Promise<void> { receiver.on(‘imageArrival’, () => { receiver.readNextImage(async (err, nextImage: image.Image) => { if (err || nextImage === undefined) { return; } let imageComponent = await nextImage.getComponent(4); if (imageComponent.byteBuffer.byteLength > 0) { let path: string = this.pathDir + “/image.yuv”; console.log("===========" + path); let file = fs.openSync(path, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE); fs.write(file.fd, imageComponent.byteBuffer).then((writeLen) => { console.info(“write data to file succeed and size is:” + writeLen); fs.closeSync(file); }).catch((err: BusinessError) => { console.info(“write data to file failed with error message: " + err.message + “, error code: " + err.code); }); } nextImage.release() nextImage.getComponent(image.ComponentType.JPEG, async (err, imgComponent: image.Component) => { if (err || imgComponent === undefined) { return; } console.log(”=========================================”) if (imgComponent.byteBuffer as ArrayBuffer) { let sourceOptions: image.SourceOptions = { sourceDensity: 120, sourcePixelFormat: 8, // NV21 sourceSize: { height: 480, width: 640 } } let imageResource = image.createImageSource(imgComponent.byteBuffer, sourceOptions); this.imgUrl = await imageResource.createPixelMap(); await imageResource.release(); } else { return; } nextImage.release() }) }) }) }

build() { Column() { // 创建XComponent XComponent({ id: ‘’, type: ‘surface’, libraryname: ‘’, controller: this.mXComponentController }) .onLoad(() => {

      // 设置Surface宽高(1920*1080),预览尺寸设置参考前面 previewProfilesArray 获取的当前设备所支持的预览分辨率大小去设置
      this.mXComponentController.setXComponentSurfaceSize({ surfaceWidth: 1920, surfaceHeight: 1080 });

      // 获取Surface ID
      this.surfaceId = this.mXComponentController.getXComponentSurfaceId();
      let receiver: image.ImageReceiver = image.createImageReceiver(640, 480, 4, 8);
      this.createDualChannelPreview(this.surfaceId, receiver);
      this.onImageArrival(receiver);
    })
    .width('1920px')
    .height('1080px')
  Row() {
    // 将编辑好的pixelMap传递给状态变量imagePixelMap后,通过Image组件进行渲染
    // Image(this.imgUrl).objectFit(ImageFit.None)
  }.width('100%').height('50%').backgroundColor('#F0F0F0')
}

} }

更多关于HarmonyOS 鸿蒙Next图片视频预览页DEMO的实战系列教程也可以访问 https://www.itying.com/category-93-b0.html


针对“HarmonyOS 鸿蒙Next图片视频预览页DEMO”这一问题,以下是直接相关的回答:

HarmonyOS 鸿蒙Next图片视频预览页DEMO的实现,主要依赖于鸿蒙系统提供的媒体组件和UI框架。开发者可以通过使用鸿蒙的ArkUI框架,结合媒体播放组件(如Image和VideoPlayer等),来构建图片和视频预览的功能。

具体来说,实现图片预览时,可以使用Image组件加载并显示图片;实现视频预览时,则需要用到VideoPlayer组件,并配置相应的视频源和播放控制逻辑。此外,为了提升用户体验,还可以结合鸿蒙的动画和布局系统,为预览页添加过渡动画和响应式布局。

在开发过程中,开发者需要确保所使用的组件和API与鸿蒙系统的版本兼容,并遵循鸿蒙的开发规范和最佳实践。同时,为了优化性能和资源使用,可以对图片和视频进行预处理,如压缩、裁剪等。

如果开发者在构建HarmonyOS 鸿蒙Next图片视频预览页DEMO时遇到具体问题,可以查阅鸿蒙的官方文档和开发者社区,获取更多的技术支持和解决方案。如果问题依旧没法解决请联系官网客服,官网地址是:https://www.itying.com/category-93-b0.html。

回到顶部