HarmonyOS鸿蒙Next中请问在使用@kit.CameraKit进行预览时,如何能触发一次对焦?
HarmonyOS鸿蒙Next中请问在使用@kit.CameraKit进行预览时,如何能触发一次对焦? 发现在预览过程中,拍摄物体变化时对焦会有延迟,想在点击屏幕时触发一次对焦,该如何实现呢?
试了一下 session的setFocusPoint方法 也没有效果
3 回复
聚焦模式是自动对焦的话:this.session.setFocusMode(camera.FocusMode.FOCUS_MODE_AUTO)
参考Demo:
import camera from '@ohos.multimedia.camera';
import image from '@ohos.multimedia.image';
import abilityAccessCtrl from '@ohos.abilityAccessCtrl';
import common from '@ohos.app.ability.common';
import fs from '@ohos.file.fs';
import { BusinessError } from '@kit.BasicServicesKit';
import { PhotoAccessHelper } from '@ohos.file.photoAccessHelper';
/**
 * Demo page: camera preview via an ImageReceiver-backed preview stream that is
 * decoded frame-by-frame into a PixelMap and rendered with an Image component,
 * plus a photo output, a "trigger focus once" button (sets auto-focus mode and
 * a focus point on the capture session) and a "take photo" button.
 */
@Entry
@Component
struct Index3 {
@State message: string = 'Hello World'
private mXComponentController: XComponentController = new XComponentController;
private surfaceId: string = '-1';
// Latest decoded preview frame, rendered by the Image component in build().
@State imgUrl: PixelMap | undefined = undefined;
private context: common.UIAbilityContext = getContext(this) as common.UIAbilityContext;
// Profile selected for the ImageReceiver-backed preview stream ("preview stream 2").
private previewProfilesObj2: camera.Profile | undefined = undefined;
// Receives preview frames; its surface is handed to the preview output.
private receiver: image.ImageReceiver | undefined = undefined;
// PixelMap decoded from the captured photo buffer.
@State pixmap: PixelMap | undefined = undefined
@State cameraWidth: number = 2772;
@State cameraHeight: number = 1344;
@State photoOutput: camera.PhotoOutput | undefined = undefined;
// Capture session; the focus mode and focus point are set on this object.
@State captureSession: camera.CaptureSession | undefined = undefined;
// Requests the CAMERA runtime permission, then builds the preview pipeline.
aboutToAppear() {
// Request the CAMERA permission from the user
let context = getContext() as common.UIAbilityContext;
abilityAccessCtrl.createAtManager().requestPermissionsFromUser(context, ['ohos.permission.CAMERA']).then(() => {
this.createDualChannelPreview();
});
}
// NOTE(review): this re-creates the whole pipeline on every page show, in
// addition to the aboutToAppear() path above — confirm this is intended.
onPageShow(): void {
this.createDualChannelPreview();
}
/**
 * Builds the full camera pipeline: picks the first camera device and its
 * profiles, creates an ImageReceiver-backed preview output and a photo
 * output, opens the camera input, then configures and starts the capture
 * session (beginConfig -> addInput/addOutput -> commitConfig -> start).
 */
async createDualChannelPreview(): Promise<void> {
let cameraManager: camera.CameraManager = camera.getCameraManager(this.context)
let camerasDevices: Array<camera.CameraDevice> = cameraManager.getSupportedCameras(); // Get the supported camera device objects
// Get the output-capability profiles
let profiles: camera.CameraOutputCapability = cameraManager.getSupportedOutputCapability(camerasDevices[0], camera.SceneMode.NORMAL_PHOTO);
let previewProfiles: Array<camera.Profile> = profiles.previewProfiles;
// Preview stream 2
this.previewProfilesObj2 = previewProfiles[0];
this.receiver = image.createImageReceiver(this.previewProfilesObj2.size, 2000, 8);
// Create the output object for preview stream 2
let imageReceiverSurfaceId: string = await this.receiver.getReceivingSurfaceId();
let previewOutput2: camera.PreviewOutput = cameraManager.createPreviewOutput(this.previewProfilesObj2, imageReceiverSurfaceId);
// Create the photo capture output stream
let photoProfilesArray: Array<camera.Profile> = profiles.photoProfiles;
try {
this.photoOutput = cameraManager.createPhotoOutput(photoProfilesArray[0]);
} catch (error) {
let err = error as BusinessError;
console.error('Failed to createPhotoOutput errorCode = ' + err.code);
}
if (this.photoOutput === undefined) {
return;
}
// Create the cameraInput object
let cameraInput: camera.CameraInput = cameraManager.createCameraInput(camerasDevices[0]);
// Open the camera
await cameraInput.open();
// Session flow
this.captureSession = cameraManager.createCaptureSession();
// let captureSession: camera.CaptureSession = cameraManager.createCaptureSession();
// let captureSession: camera.PhotoSession = cameraManager.createSession(camera.SceneMode.NORMAL_PHOTO) as camera.PhotoSession;
// Begin configuring the session
this.captureSession.beginConfig();
// Add the CameraInput to the session
this.captureSession.addInput(cameraInput);
// Add preview stream 2 to the session
this.captureSession.addOutput(previewOutput2);
try {
this.captureSession.addOutput(this.photoOutput);
} catch (error) {
let err = error as BusinessError;
console.error('Failed to addOutput(photoOutput). errorCode = ' + err.code);
}
// Commit the configuration
await this.captureSession.commitConfig();
// Start the session
await this.captureSession.start();
this.onImageArrival(this.receiver);
this.setPhotoOutputCb(this.photoOutput)
}
/**
 * Saves a captured JPEG buffer into the media library via photoAccessHelper,
 * then releases the source image. createAsset requires the
 * READ_IMAGEVIDEO and WRITE_IMAGEVIDEO permissions (see note below).
 */
async savePicture(buffer: ArrayBuffer, img: image.Image) {
const context = getContext(this);
let photoAccessHelper: PhotoAccessHelper.PhotoAccessHelper = PhotoAccessHelper.getPhotoAccessHelper(context);
let options: PhotoAccessHelper.CreateOptions = {
title: Date.now().toString()
};
let photoUri: string = await photoAccessHelper.createAsset(PhotoAccessHelper.PhotoType.IMAGE, 'jpg', options);
// Calling createAsset requires the ohos.permission.READ_IMAGEVIDEO and ohos.permission.WRITE_IMAGEVIDEO permissions
let file: fs.File = fs.openSync(photoUri, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE);
await fs.write(file.fd, buffer);
fs.closeSync(file);
img.release();
}
/**
 * Registers the 'photoAvailable' callback on the photo output. After this,
 * calling photoOutput.capture() delivers the captured buffer here, where it
 * is decoded into PixelMaps and saved to the media library via savePicture().
 */
setPhotoOutputCb(photoOutput: camera.PhotoOutput) {
// Once this callback is registered, calling photoOutput.capture() passes the captured buffer into the callback
photoOutput.on('photoAvailable', (errCode: BusinessError, photo: camera.Photo): void => {
console.info('photoAvailable photoOutput的capture方法执行了');
console.info(`err: ${JSON.stringify(errCode)}`);
if (errCode || photo === undefined) {
console.error('getPhoto failed');
return;
}
let imageObj = photo.main;
imageObj.getComponent(image.ComponentType.JPEG, (errCode: BusinessError, component: image.Component): void => {
console.info('getComponent start');
if (errCode || component === undefined) {
console.error('getComponent failed');
return;
}
let buffer: ArrayBuffer;
if (component.byteBuffer) {
buffer = component.byteBuffer;
console.log("buffer::" + buffer.byteLength)
// NOTE(review): width is derived assuming 4 bytes per pixel (pixelFormat 3)
// and a fixed height of 1344 — confirm this matches the actual buffer layout.
let opts: image.InitializationOptions = { editable: true, pixelFormat: 3, size: { height: 1344, width: (buffer.byteLength/4)/1344 } }
image.createPixelMap(buffer, opts, (error: BusinessError, pixelMap: image.PixelMap) => {
if (error) {
console.info("error::: " + error)
return;
} else {
console.info('Succeeded in creating pixelmap.');
}
})
if (component.byteBuffer as ArrayBuffer) {
let sourceOptions: image.SourceOptions = {
sourceDensity: 120,
// NOTE(review): value 0 here is labeled NV21, but onImageArrival() uses 8
// for the same label — verify which pixel-format constant is correct.
sourcePixelFormat: 0, // NV21
sourceSize: {
height: 240,
width: 320
},
}
try {
let imageResource = image.createImageSource(component.byteBuffer, sourceOptions);
imageResource.createPixelMap({}).then(res=>{
this.pixmap = res;
});
} catch (error) {
let err = error as BusinessError;
console.error('Failed to addOutput(photoOutput). errorCode = ' + err.code);
}
} else {
return;
}
} else {
console.error('byteBuffer is null');
return;
}
this.savePicture(buffer, imageObj);
});
});
}
/**
 * Registers the 'imageArrival' callback on the ImageReceiver: each preview
 * frame is read, written to a temp file, decoded into a PixelMap (imgUrl),
 * and then released so the receiver can deliver the next frame.
 */
async onImageArrival(receiver: image.ImageReceiver): Promise<void> {
receiver.on('imageArrival', () => {
console.error("imageArrival 接收图片触发");
receiver.readLatestImage((err, nextImage: image.Image) => {
if (err || nextImage === undefined) {
return;
}
nextImage.getComponent(image.ComponentType.JPEG, (err, imgComponent: image.Component) => {
if (err || imgComponent === undefined) {
return;
}
this.saveImageToFile(imgComponent.byteBuffer);
if (imgComponent.byteBuffer as ArrayBuffer) {
let sourceOptions: image.SourceOptions = {
sourceDensity: 120,
sourcePixelFormat: 8, // NV21
sourceSize: {
height: this.previewProfilesObj2!.size.height,
width: this.previewProfilesObj2!.size.width
},
}
let decodingOptions : image.DecodingOptions = {
editable: true,
desiredPixelFormat: 3,
}
let imageResource = image.createImageSource(imgComponent.byteBuffer, sourceOptions);
imageResource.createPixelMap(decodingOptions).then(res=>{
this.imgUrl = res;
});
} else {
return;
}
// Release the frame so the receiver can deliver the next one
nextImage.release();
})
})
})
}
/**
 * Writes a raw frame buffer to <tempDir>/test.jpg for offline inspection.
 * NOTE(review): the file is only closed on the success path; a failed write
 * leaks the file descriptor.
 */
saveImageToFile(data: ArrayBuffer) {
const context = getContext(this);
let filePath = context.tempDir + "/test.jpg";
console.info("path is " + filePath);
let file = fs.openSync(filePath, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE);
fs.write(file.fd, data, (err, writeLen) => {
if (err) {
console.info("write failed with error message: " + err.message + ", error code: " + err.code);
} else {
console.info("write data to file succeed and size is:" + writeLen);
fs.closeSync(file);
}
});
// context.tempDir + "/test.jpg" corresponds to the absolute path /data/app/el2/100/base/com.example.image_example/haps/Packer/temp/test.jpg (you can also locate it via hdc shell)
// hdc file recv /data/app/el2/100/base/com.example.image_example/haps/Packer/temp/test.jpg D:\ (pull the file to the local machine to inspect the result)
}
build() {
Column() {
Row() {
// Pass the decoded PixelMap to the state variable and render it through the Image component
Image(this.imgUrl).objectFit(ImageFit.Cover).width('100%').height('50%')
// Image($r('app.media.background')).objectFit(ImageFit.Cover).width('100%').height('100%')
}.backgroundColor('#F0F0F0')
Row() {
Button(){
Text("触发一次对焦")
.fontColor(Color.Black)
.alignSelf(ItemAlign.Center)
.onClick(() => {
// One-shot focus: require auto-focus support, then set mode and point.
let flag = this.captureSession?.isFocusModeSupported(camera.FocusMode.FOCUS_MODE_AUTO)
if (flag) {
this.captureSession?.setFocusMode(camera.FocusMode.FOCUS_MODE_AUTO)
// NOTE(review): setFocusPoint takes normalized coordinates; {x:1, y:1}
// is the far corner — map the actual tap position into 0..1 instead.
this.captureSession?.setFocusPoint({x: 1, y: 1})
console.log("success")
return
}
console.log("failure")
})
}
.width(100)
.height(100)
Button() {
Text("拍照")
.fontColor(Color.Black)
.alignSelf(ItemAlign.Center)
.onClick(() => {
let settings: camera.PhotoCaptureSetting = {
quality: camera.QualityLevel.QUALITY_LEVEL_HIGH, // High image quality
rotation: camera.ImageRotation.ROTATION_0, // Image rotation angle 0
mirror: false // Mirror switch (off by default)
};
if (this.photoOutput){
this.photoOutput.capture(settings, (err: BusinessError) => {
// if (this.photoOutput){
// this.setPhotoOutputCb(this.photoOutput)
// }
if (err) {
console.error(`Failed to capture the photo. error: ${JSON.stringify(err)}`);
return;
}
console.info('Callback invoked to indicate the photo capture request success.');
});
}
})
}
.width(100)
.height(100)
Image(this.pixmap)
.width(200)
.height(200)
}
}
}
更多关于HarmonyOS鸿蒙Next中请问在使用@kit.CameraKit进行预览时,如何能触发一次对焦?的实战系列教程也可以访问 https://www.itying.com/category-93-b0.html
在HarmonyOS鸿蒙Next中,@kit.CameraKit 并没有提供名为 focus() 的方法。触发一次对焦的正确方式是在会话(CaptureSession/PhotoSession)对象上设置对焦模式和对焦点,具体步骤如下:
- 先通过 session.isFocusModeSupported(camera.FocusMode.FOCUS_MODE_AUTO) 确认设备支持自动对焦。
- 调用 session.setFocusMode(camera.FocusMode.FOCUS_MODE_AUTO) 设置自动对焦模式。
- 调用 session.setFocusPoint({x, y}) 设置对焦点,坐标为归一化坐标(取值范围 0~1),需要将用户点击屏幕的位置换算到该坐标系。
注意:setFocusMode / setFocusPoint 必须在会话 commitConfig() 并 start() 之后调用才会生效,否则不会有任何效果(这也是直接调用 setFocusPoint 没有反应的常见原因)。
在HarmonyOS鸿蒙Next中,使用@kit.CameraKit 进行预览时,应通过会话对象的对焦接口触发对焦,而不是调用 CameraKit 上的方法(CameraKit 并无 focus() 接口)。
示例代码如下:
import { camera } from '@kit.CameraKit';
// 假设 session 是已经 commitConfig() 并 start() 的会话对象
if (session.isFocusModeSupported(camera.FocusMode.FOCUS_MODE_AUTO)) {
  session.setFocusMode(camera.FocusMode.FOCUS_MODE_AUTO);
  session.setFocusPoint({ x: 0.5, y: 0.5 }); // 归一化坐标,0~1,对应画面中心
}
这样即可在点击屏幕时触发相机进行一次自动对焦。