To capture preview frames with the Camera2 API inside a fragment and display them in an ImageView, follow the steps below.
The Camera2 API (android.hardware.camera2) is Android's interface for controlling the camera hardware; it offers more capabilities and finer-grained control than the deprecated Camera API.
The simplified example below shows how to open a camera, configure a preview session that targets both a SurfaceView and an ImageReader, and push each captured frame into an ImageView:
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.util.Size;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.content.ContextCompat;
import androidx.fragment.app.Fragment;

import java.nio.ByteBuffer;
import java.util.Arrays;

public class CameraPreviewFragment extends Fragment {

    private CameraDevice cameraDevice;
    private CameraCaptureSession captureSession;
    private CaptureRequest.Builder captureRequestBuilder;
    private ImageReader imageReader;
    private Surface previewSurface;
    private ImageView imageView;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // fragment_camera_preview is assumed to contain a SurfaceView (R.id.surfaceView)
        // for the live preview and an ImageView (R.id.imageView) for the decoded frames.
        View view = inflater.inflate(R.layout.fragment_camera_preview, container, false);
        imageView = view.findViewById(R.id.imageView);
        return view;
    }

    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        SurfaceView surfaceView = view.findViewById(R.id.surfaceView);
        surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(@NonNull SurfaceHolder holder) {
                // The SurfaceView's surface is ready; use it as the preview target.
                previewSurface = holder.getSurface();
                openCamera();
            }

            @Override
            public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {}

            @Override
            public void surfaceDestroyed(@NonNull SurfaceHolder holder) {}
        });
    }

    private void openCamera() {
        CameraManager manager = (CameraManager) requireActivity().getSystemService(Context.CAMERA_SERVICE);
        try {
            String cameraId = manager.getCameraIdList()[0]; // usually the back-facing camera
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // Pick the smallest supported JPEG size to keep per-frame decoding cheap.
            Size[] outputSizes = map.getOutputSizes(ImageFormat.JPEG);
            Size chosenSize = outputSizes[0];
            for (Size size : outputSizes) {
                if ((long) size.getWidth() * size.getHeight()
                        < (long) chosenSize.getWidth() * chosenSize.getHeight()) {
                    chosenSize = size;
                }
            }

            // The ImageReader receives the captured frames and hands them to imageAvailableListener.
            // JPEG keeps the example simple; a real app would typically use YUV_420_888 for a
            // continuous stream and convert it before display.
            imageReader = ImageReader.newInstance(chosenSize.getWidth(), chosenSize.getHeight(),
                    ImageFormat.JPEG, 2);
            imageReader.setOnImageAvailableListener(imageAvailableListener, null);

            // The CAMERA runtime permission must already have been granted.
            if (ContextCompat.checkSelfPermission(requireContext(), Manifest.permission.CAMERA)
                    != PackageManager.PERMISSION_GRANTED) {
                return;
            }

            manager.openCamera(cameraId, new CameraDevice.StateCallback() {
                @Override
                public void onOpened(@NonNull CameraDevice camera) {
                    cameraDevice = camera;
                    createCameraPreviewSession();
                }

                @Override
                public void onDisconnected(@NonNull CameraDevice camera) {
                    camera.close();
                    cameraDevice = null;
                }

                @Override
                public void onError(@NonNull CameraDevice camera, int error) {
                    camera.close();
                    cameraDevice = null;
                }
            }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void createCameraPreviewSession() {
        try {
            captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureRequestBuilder.addTarget(previewSurface);            // live preview on the SurfaceView
            captureRequestBuilder.addTarget(imageReader.getSurface());  // frames for the ImageView

            cameraDevice.createCaptureSession(
                    Arrays.asList(previewSurface, imageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession session) {
                            if (cameraDevice == null) return;
                            captureSession = session;
                            updatePreview();
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession session) {}
                    }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void updatePreview() {
        if (cameraDevice == null) return;
        captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        try {
            captureSession.setRepeatingRequest(captureRequestBuilder.build(), null, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    // Decode each incoming JPEG frame and display it in the ImageView.
    private final ImageReader.OnImageAvailableListener imageAvailableListener = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = null;
            try {
                image = reader.acquireLatestImage();
                if (image != null) {
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] bytes = new byte[buffer.remaining()];
                    buffer.get(bytes);
                    final Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
                    // Views may only be touched on the main thread.
                    imageView.post(() -> imageView.setImageBitmap(bitmap));
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                if (image != null) {
                    image.close();
                }
            }
        }
    };

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        // Release camera resources: close the session before the device.
        if (captureSession != null) {
            captureSession.close();
            captureSession = null;
        }
        if (cameraDevice != null) {
            cameraDevice.close();
            cameraDevice = null;
        }
        if (imageReader != null) {
            imageReader.close();
            imageReader = null;
        }
    }
}
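For brevity the example passes null handlers everywhere, so the camera callbacks and the JPEG decoding all run on the main thread. In a real app you would usually route this work to a dedicated background HandlerThread. A minimal sketch of such a helper (the class and method names are illustrative, not part of the example above): pass getHandler() instead of null to openCamera(), setRepeatingRequest(), and setOnImageAvailableListener(), and call stop() from onDestroyView().

import android.os.Handler;
import android.os.HandlerThread;

// Small helper that owns a background thread for camera callbacks (illustrative name).
public class CameraThreadHelper {
    private HandlerThread thread;
    private Handler handler;

    public void start() {
        thread = new HandlerThread("CameraBackground");
        thread.start();
        handler = new Handler(thread.getLooper());
    }

    public Handler getHandler() {
        return handler;
    }

    public void stop() {
        if (thread != null) {
            thread.quitSafely();
            thread = null;
            handler = null;
        }
    }
}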
With the steps and code above, you should be able to run a Camera2 preview in a fragment and display the captured frames in an ImageView.
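Note that openCamera() simply returns if the CAMERA permission has not been granted yet. One way to request it at runtime is sketched below, assuming AndroidX Fragment 1.3+ (Activity Result API); the cameraPermissionLauncher field name is illustrative, and the members would be added to CameraPreviewFragment.

// Requires androidx.activity.result.ActivityResultLauncher and
// androidx.activity.result.contract.ActivityResultContracts.
// Register the launcher as part of fragment initialization (field initializer or onCreate).
private final ActivityResultLauncher<String> cameraPermissionLauncher =
        registerForActivityResult(new ActivityResultContracts.RequestPermission(), granted -> {
            if (granted) {
                openCamera();
            }
        });

@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    if (ContextCompat.checkSelfPermission(requireContext(), Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        cameraPermissionLauncher.launch(Manifest.permission.CAMERA);
    }
}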