Flutter 拍照 视频 原理分析
前言
最近在看闲鱼的《Flutter技术解析与实战》,直接上来看的第二章,发现能力增强这块有点晕,Texture 是啥东西?快速过了一遍后,就去 Flutter 官网了解了一下相机示例,针对相机示例进行源码分析,然后回过头再看第二章能力增强,就轻松多了。
本文先针对相机示例进行拍照和视频分析 (不包含 Android 端与系统拍照、视频交互的源码分析) ,从中可以学到相关插件里原生和 Flutter 之间如何共享图像,以及如何在 Flutter 中嵌入原生组件。最后附上了 闲鱼这本书第二章能力增强-基于外接纹理的同层渲染 的相关链接。
参考资料
《Flutter技术解析与实战——闲鱼技术演进与创新》第二章 能力增强
通过共享内存优化flutter外接纹理的渲染性能,实时渲染不是梦
目录
一、相机示例
效果图
代码,和中文网示例一样,需要注意相机的初始化和一些设置是异步的。
pubspec.yaml
dependencies:
camera: ^0.5.2+2
video_player: ^0.10.12+2
path_provider: ^0.4.1
main.dart
// Cameras discovered on the device; populated in main() before runApp()
// so every widget can read it synchronously.
List<CameraDescription> cameras;

void main() async{
  // Required because availableCameras() talks to the platform side over a
  // method channel before runApp() is called.
  WidgetsFlutterBinding.ensureInitialized();
  cameras = await availableCameras();
  runApp(MyApp());
}
/// Application root: hosts the [CameraHome] demo screen in a [MaterialApp].
class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) => MaterialApp(
        title: 'Flutter Demo',
        theme: ThemeData(primarySwatch: Colors.blue),
        home: CameraHome(),
      );
}
camera.dart
import 'dart:io';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:fluttertwo/main.dart';
import 'package:path_provider/path_provider.dart';
import 'package:video_player/video_player.dart';
/// Stateful host widget for the camera demo screen.
class CameraHome extends StatefulWidget {
  @override
  _CameraHomeState createState() => _CameraHomeState();
}
class _CameraHomeState extends State<CameraHome> with WidgetsBindingObserver {
  CameraController controller;
  String imagePath; // Where the last captured picture was saved.
  String videoPath; // Where the last recorded video was saved.
  VideoPlayerController videoController;
  VoidCallback videoPlayerListener;
  bool enableAudio = true;
  final GlobalKey<ScaffoldState> _scaffoldKey = GlobalKey<ScaffoldState>();

  @override
  void initState() {
    super.initState();
    // BUGFIX: the original sample overrode setState() and registered the
    // lifecycle observer there, which added one more observer on every
    // rebuild. The observer must be registered exactly once, in initState().
    WidgetsBinding.instance.addObserver(this);
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this);
    // Release native camera / player resources (the original leaked them).
    controller?.dispose();
    videoController?.dispose();
    super.dispose();
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    // App left the foreground: release the camera.
    if (state == AppLifecycleState.inactive) {
      controller?.dispose();
    } else if (state == AppLifecycleState.resumed) {
      // Back in the foreground: re-open the previously selected camera.
      if (controller != null) {
        onNewCameraSelected(controller.description);
      }
    }
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      key: _scaffoldKey,
      appBar: AppBar(
        title: Text("相机示例"),
      ),
      body: Column(
        children: <Widget>[
          Expanded(
            child: Container(
              child: Padding(
                padding: EdgeInsets.all(1.0),
                child: Center(
                  child: _cameraPreviewWidget(),
                ),
              ),
              // Red border while recording, grey otherwise.
              decoration: BoxDecoration(
                color: Colors.black,
                border: Border.all(
                  color: controller != null && controller.value.isRecordingVideo
                      ? Colors.redAccent
                      : Colors.grey,
                  width: 3.0,
                ),
              ),
            ),
          ),
          _captureControlRowWidget(),
          _toggleAudioWidget(),
          Padding(
            padding: EdgeInsets.all(5.0),
            child: Row(
              mainAxisAlignment: MainAxisAlignment.start,
              children: <Widget>[
                _cameraTogglesRowWidget(),
                _thumbnailWidget(),
              ],
            ),
          ),
        ],
      ),
    );
  }

  /// Thumbnail of the most recently captured picture or recorded video.
  Widget _thumbnailWidget() {
    return Expanded(
      child: Align(
        alignment: Alignment.centerRight,
        child: Row(
          mainAxisSize: MainAxisSize.min,
          children: <Widget>[
            videoController == null && imagePath == null
                ? Container()
                : SizedBox(
                    child: (videoController == null)
                        ? Image.file(File(imagePath), width: 64.0, height: 64.0)
                        : Container(
                            child: Center(
                              child: AspectRatio(
                                aspectRatio: videoController.value.size != null
                                    ? videoController.value.aspectRatio
                                    : 1.0,
                                child: VideoPlayer(videoController),
                              ),
                            ),
                            decoration: BoxDecoration(
                              border: Border.all(color: Colors.pink),
                            ),
                            width: 64.0,
                            height: 64.0,
                          ),
                  ),
          ],
        ),
      ),
    );
  }

  /// One radio toggle per camera reported by the device.
  Widget _cameraTogglesRowWidget() {
    final List<Widget> toggles = <Widget>[];
    if (cameras.isEmpty) {
      return Text("没有检测到摄像头");
    } else {
      for (CameraDescription cameraDescription in cameras) {
        toggles.add(SizedBox(
          width: 90.0,
          child: RadioListTile<CameraDescription>(
              title: Icon(getCameraLensIcon(cameraDescription.lensDirection)),
              groupValue: controller?.description,
              value: cameraDescription,
              // Switching cameras is disabled while a recording is running.
              onChanged: controller != null && controller.value.isRecordingVideo
                  ? null
                  : onNewCameraSelected),
        ));
      }
      return Row(
        children: toggles,
      );
    }
  }

  /// Switch that enables/disables audio recording. Flipping it re-opens the
  /// camera because enableAudio is a CameraController constructor option.
  Widget _toggleAudioWidget() {
    return Padding(
      padding: EdgeInsets.only(left: 25),
      child: Row(
        children: <Widget>[
          Text("开启录音"),
          Switch(
            value: enableAudio,
            onChanged: (value) {
              enableAudio = value;
              if (controller != null) {
                onNewCameraSelected(controller.description);
              }
            },
          ),
        ],
      ),
    );
  }

  /// Capture toolbar: take picture, start recording, stop recording.
  Widget _captureControlRowWidget() {
    return Row(
      mainAxisAlignment: MainAxisAlignment.spaceEvenly, // Evenly spaced.
      mainAxisSize: MainAxisSize.max,
      children: <Widget>[
        IconButton(
          icon: Icon(Icons.camera_alt),
          color: Colors.blue,
          // Enabled only when initialized and not currently recording.
          onPressed: controller != null &&
                  controller.value.isInitialized &&
                  !controller.value.isRecordingVideo
              ? onTakePictureButtonPressed
              : null,
        ),
        IconButton(
          icon: Icon(Icons.videocam),
          color: Colors.blue,
          onPressed: controller != null &&
                  controller.value.isInitialized &&
                  !controller.value.isRecordingVideo
              ? onVideoRecordButtonPressed
              : null,
        ),
        IconButton(
          icon: Icon(Icons.stop),
          color: Colors.red,
          // Stop is only enabled while a recording is in progress.
          onPressed: controller != null &&
                  controller.value.isInitialized &&
                  controller.value.isRecordingVideo
              ? onStopButtonPressed
              : null,
        ),
      ],
    );
  }

  /// Starts video recording and reports the target path in a snack bar.
  void onVideoRecordButtonPressed() {
    startVideoRecording().then((value) {
      if (mounted) {
        setState(() {});
      }
      if (value != null) {
        showInSnackBar("正在保存视频于 ${value}");
      }
    });
  }

  /// Stops video recording and reports where the file was saved.
  void onStopButtonPressed() {
    stopVideoRecording().then((value) {
      if (mounted) {
        setState(() {});
      }
      showInSnackBar("视频保存在: ${videoPath}");
    });
  }

  /// Asks the camera to finish the current recording, then starts playback.
  Future<void> stopVideoRecording() async {
    if (!controller.value.isRecordingVideo) {
      return null;
    }
    try {
      await controller.stopVideoRecording();
    } on CameraException catch (e) {
      _showCameraException(e);
      return null;
    }
    await _startVideoPlayer();
  }

  /// Plays back the freshly recorded video in the thumbnail area.
  Future<void> _startVideoPlayer() async {
    final VideoPlayerController vcontroller =
        VideoPlayerController.file(File(videoPath));
    videoPlayerListener = () {
      if (videoController != null && videoController.value.size != null) {
        if (mounted) {
          setState(() {});
        }
        // One-shot listener: detach after the first useful update.
        videoController.removeListener(videoPlayerListener);
      }
    };
    vcontroller.addListener(videoPlayerListener);
    await vcontroller.setLooping(true);
    await vcontroller.initialize();
    // Dispose the previous player before swapping in the new one.
    await videoController?.dispose();
    if (mounted) {
      setState(() {
        imagePath = null;
        videoController = vcontroller;
      });
    }
    await vcontroller.play();
  }

  /// Begins recording into a timestamped .mp4 under the app documents dir.
  /// Returns the file path, or null if recording could not start.
  Future<String> startVideoRecording() async {
    if (!controller.value.isInitialized) {
      showInSnackBar("请选择一个摄像头");
      return null;
    }
    // Decide where the video will be saved.
    final Directory extDir = await getApplicationDocumentsDirectory();
    final String dirPath = "${extDir.path}/Movies/flutter_test";
    // BUGFIX: the original awaited the synchronous createSync(), which
    // returns void; use the asynchronous create() so the await is meaningful.
    await Directory(dirPath).create(recursive: true);
    final String filePath = "$dirPath/${timestamp()}.mp4";
    if (controller.value.isRecordingVideo) {
      return null; // Already recording.
    }
    try {
      videoPath = filePath;
      await controller.startVideoRecording(filePath);
    } on CameraException catch (e) {
      _showCameraException(e);
      return null;
    }
    return filePath;
  }

  /// Tap handler for the shutter button.
  void onTakePictureButtonPressed() {
    takePicture().then((value) {
      if (mounted) {
        setState(() {
          imagePath = value;
          // A new picture replaces any video in the thumbnail slot.
          videoController?.dispose();
          videoController = null;
        });
        if (value != null) {
          showInSnackBar('图片保存在 $value');
        }
      }
    });
  }

  /// Takes a picture into a timestamped .jpg and returns its path,
  /// or null on failure / when a capture is already in progress.
  Future<String> takePicture() async {
    if (!controller.value.isInitialized) {
      showInSnackBar("错误: 请选择一个相机");
      return null;
    }
    final Directory extDir = await getApplicationDocumentsDirectory();
    final String dirPath = '${extDir.path}/Movies/flutter_test';
    // BUGFIX: same as startVideoRecording() — await the async create().
    await Directory(dirPath).create(recursive: true);
    final String filePath = '$dirPath/${timestamp()}.jpg';
    if (controller.value.isTakingPicture) {
      return null;
    }
    try {
      await controller.takePicture(filePath);
    } on CameraException catch (e) {
      _showCameraException(e);
      return null;
    }
    return filePath;
  }

  /// Millisecond timestamp used to build unique file names.
  String timestamp() => DateTime.now().millisecondsSinceEpoch.toString();

  /// Live preview area; shows a hint until a camera has been selected.
  Widget _cameraPreviewWidget() {
    if (controller == null || !controller.value.isInitialized) {
      return Text(
        "选择一个摄像头",
        style: TextStyle(
          color: Colors.white,
          fontSize: 24.0,
          fontWeight: FontWeight.w900,
        ),
      );
    } else {
      // Size the preview to the camera's own aspect ratio.
      return AspectRatio(
        aspectRatio: controller.value.aspectRatio,
        child: CameraPreview(controller),
      );
    }
  }

  /// Called when the user picks a camera; disposes the old controller and
  /// creates + initializes a new one with the current audio setting.
  void onNewCameraSelected(CameraDescription cameraDescription) async {
    if (controller != null) {
      await controller.dispose();
    }
    controller = CameraController(
      cameraDescription,
      ResolutionPreset.high,
      enableAudio: enableAudio,
    );
    controller.addListener(() {
      if (mounted) {
        setState(() {});
        if (controller.value.hasError) {
          showInSnackBar("Camera error ${controller.value.errorDescription}");
        }
      }
    });
    try {
      await controller.initialize();
    } on CameraException catch (e) {
      _showCameraException(e);
    }
  }

  /// Logs a camera error and surfaces it in a snack bar.
  _showCameraException(CameraException e) {
    logError(e.code, e.description);
    showInSnackBar("Error: ${e.code}\n${e.description}");
  }

  /// Shows [message] in this screen's snack bar.
  showInSnackBar(String message) {
    _scaffoldKey.currentState.showSnackBar(SnackBar(
      content: Text(message),
    ));
  }
}
/// 获取不同摄像头的图标(前置、后置、其它)
/// Maps a camera lens direction (back / front / external) to its icon.
IconData getCameraLensIcon(CameraLensDirection direction) {
  if (direction == CameraLensDirection.back) {
    return Icons.camera_rear;
  }
  if (direction == CameraLensDirection.front) {
    return Icons.camera_front;
  }
  if (direction == CameraLensDirection.external) {
    return Icons.camera;
  }
  // Unreachable for the current enum values.
  throw ArgumentError("Unknown lens direction");
}
/// Prints an error code and message to the console.
void logError(String code, String message) {
  print('Error: $code\nError Message: $message');
}
二、Flutter相机工作原理分析
通过上述代码,我们可以看出在使用 camera
和 video_player
插件后 (插件地址) ,Flutter端只需要使用以下关键代码就可以正常拍照和记录视频
// Take a picture (written to filePath)
await controller.takePicture(filePath);
// Record a video (shown here: constructing the player for the recorded file)
final VideoPlayerController vcontroller = VideoPlayerController.file(File(videoPath));
vcontroller.play()
1、拍照
我们直接看CameraController调用的**takePicture
**方法
/// Sends a 'takePicture' request over the plugin's method channel; the
/// platform side writes the JPEG to [path].
Future<void> takePicture(String path) async {
  if (!value.isInitialized || _isDisposed) {
    throw CameraException(
      'Uninitialized CameraController.',
      'takePicture was called on uninitialized CameraController',
    );
  }
  if (value.isTakingPicture) {
    // Only one capture may be in flight at a time.
    throw CameraException(
      'Previous capture has not returned yet.',
      'takePicture was called before the previous capture returned.',
    );
  }
  try {
    // Note 1: flag the capture as in progress, then hand the texture id and
    // the target file path to the platform implementation.
    value = value.copyWith(isTakingPicture: true);
    await _channel.invokeMethod<void>(
      'takePicture',
      <String, dynamic>{'textureId': _textureId, 'path': path},
    );
    value = value.copyWith(isTakingPicture: false);
  } on PlatformException catch (e) {
    // Clear the in-progress flag before surfacing the platform error.
    value = value.copyWith(isTakingPicture: false);
    throw CameraException(e.code, e.message);
  }
}

// Channel shared by all camera plugin calls; matched on the Android side by
// the same name in MethodCallHandlerImpl.
final MethodChannel _channel = const MethodChannel('plugins.flutter.io/camera');
接下来去 camera 插件的 android或者 ios模块 找对应的MethodChannel,这里以Android为例。全局搜一下,发现在android模块的MethodCallHandlerImpl中有如下代码
methodChannel = new MethodChannel(messenger, "plugins.flutter.io/camera");
imageStreamChannel = new EventChannel(messenger, "plugins.flutter.io/camera/imageStream");
基本就确定了,Flutter 端 调用的 await _channel.invokeMethod( 'takePicture',<String, dynamic>{'textureId': _textureId, 'path': path},) 会回调到 Android 端 这边onMethodCall
方法
// Dispatches method-channel calls arriving from the Dart side of the plugin.
@Override
public void onMethodCall(@NonNull MethodCall call, @NonNull final Result result) {
  switch (call.method) {
    case "availableCameras": // Called at the start of the sample to list the cameras.
      try {
        result.success(CameraUtils.getAvailableCameras(activity));
      } catch (Exception e) {
        handleException(e, result);
      }
      break;
    case "initialize":
      {
        if (camera != null) {
          camera.close();
        }
        // Ask for camera (and optionally audio) permission first; only then
        // create the Camera wrapper.
        cameraPermissions.requestPermissions(
            activity,
            permissionsRegistry,
            call.argument("enableAudio"),
            (String errCode, String errDesc) -> {
              if (errCode == null) {
                try {
                  // Note 1
                  instantiateCamera(call, result);
                } catch (Exception e) {
                  handleException(e, result);
                }
              } else {
                result.error(errCode, errDesc, null);
              }
            });
        break;
      }
    case "takePicture":
      {
        // Note 2
        camera.takePicture(call.argument("path"), result);
        break;
      }
    case "prepareForVideoRecording":
      {
        // This optimization is not required for Android.
        result.success(null);
        break;
      }
    case "startVideoRecording":
      ....
    case "stopVideoRecording":
      ...
    case "pauseVideoRecording":
      ...
    case "resumeVideoRecording":
      ...
    case "startImageStream":
      ...
    case "stopImageStream":
      ...
    case "dispose":
      ...
    default:
      result.notImplemented();
      break;
  }
}
方法很多,这里不作分析,直接看 注释1处 instantiateCamera(call, result)
和 注释 2 处 takePicture
,注释 2 处的 takePicture。看到注释 1 处你可能会疑惑:拍照时是不是没有走初始化?其实不然,在示例代码中,选择摄像头时就会触发初始化操作,也就是注释 1 处的方法
instantiateCamera
方法内部调用如下
// Creates the Camera wrapper: reads the options sent from Dart, registers a
// SurfaceTexture with the Flutter engine (its id becomes the Dart-side
// textureId), and opens the device.
private void instantiateCamera(MethodCall call, Result result) throws CameraAccessException {
  String cameraName = call.argument("cameraName");
  String resolutionPreset = call.argument("resolutionPreset");
  boolean enableAudio = call.argument("enableAudio");
  // The texture the camera preview frames will be rendered into; shared
  // between the native camera and the Flutter engine.
  TextureRegistry.SurfaceTextureEntry flutterSurfaceTexture =
      textureRegistry.createSurfaceTexture();
  // Used to push camera events back to the Dart side.
  DartMessenger dartMessenger = new DartMessenger(messenger, flutterSurfaceTexture.id());
  camera = new Camera(activity, flutterSurfaceTexture, dartMessenger, cameraName, resolutionPreset,
      enableAudio);
  camera.open(result);
}
这里就是获取了一些Flutter端为摄像头设置的操作,关键代码在这两行
DartMessenger dartMessenger = new DartMessenger(messenger, flutterSurfaceTexture.id());
camera.open(result);
DartMessenger 是用来和 Flutter 端 通信的,例如 Flutter 端 CameraController 类
camera.open
内部又做了些什么呢?
// Opens the camera device, starts the preview, and reports the texture id
// plus preview dimensions back to the Dart side.
public void open(@NonNull final Result result) throws CameraAccessException {
  // Receives full-resolution JPEG frames when a picture is taken.
  pictureImageReader =
      ImageReader.newInstance(
          captureSize.getWidth(), captureSize.getHeight(), ImageFormat.JPEG, 2);
  // Used to stream image byte data to the Dart side.
  imageStreamReader =
      ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
  cameraManager.openCamera(
      cameraName,
      new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice device) {
          cameraDevice = device;
          try {
            // Start rendering preview frames into the Flutter texture.
            startPreview();
          } catch (CameraAccessException e) {
            result.error("CameraAccess", e.getMessage(), null);
            close();
            return;
          }
          // Reply to Dart with the texture id and preview dimensions; the
          // Dart CameraController stores textureId for the Texture widget.
          Map<String, Object> reply = new HashMap<>();
          reply.put("textureId", flutterTexture.id());
          reply.put("previewWidth", previewSize.getWidth());
          reply.put("previewHeight", previewSize.getHeight());
          result.success(reply);
        }
        @Override
        public void onClosed(@NonNull CameraDevice camera) {
          dartMessenger.sendCameraClosingEvent();
          super.onClosed(camera);
        }
        .......
}
我们需要关心一下 上面 openCamera 方法 中的 onOpened
方法,摄像头打开后,这里会 先调用startPreview 开启摄像头预览,然后回调 数据给 Flutter 端,包括 textureId
、previewWidth
、previewHeight
,这里是重点,先记下,稍后分析摄像头画面如何显示时会用到。不过开启摄像头预览的内部实现并不简单,调用的方法如下
// Starts the preview capture session; frames go both to the Flutter texture
// and to pictureImageReader's Surface.
public void startPreview() throws CameraAccessException {
  // Note 1
  // NOTE(review): this calls a two-argument overload; the variant quoted
  // below also takes an onSuccessCallback — presumably the overload forwards
  // to it. Confirm against the full plugin source.
  createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface());
}
// Builds a capture request that targets the Flutter SurfaceTexture (plus any
// extra surfaces such as the picture reader) and starts the session.
private void createCaptureSession(
    int templateType, Runnable onSuccessCallback, Surface... surfaces)
    throws CameraAccessException {
  // Close any existing capture session.
  closeCaptureSession();
  // Create a new capture builder.
  captureRequestBuilder = cameraDevice.createCaptureRequest(templateType);
  // Build Flutter surface to render to: wrap the SurfaceTexture registered
  // with the Flutter engine so camera frames land in the shared texture.
  SurfaceTexture surfaceTexture = flutterTexture.surfaceTexture();
  surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
  Surface flutterSurface = new Surface(surfaceTexture);
  captureRequestBuilder.addTarget(flutterSurface);
  List<Surface> remainingSurfaces = Arrays.asList(surfaces);
  ......
  // Note 4
  // Collect all surfaces we want to render to.
  List<Surface> surfaceList = new ArrayList<>();
  surfaceList.add(flutterSurface);
  surfaceList.addAll(remainingSurfaces);
  // Start the session
  cameraDevice.createCaptureSession(surfaceList, callback, null);
}
注释 1 处的 pictureImageReader 是 ImageReader 类型,该类允许应用直接访问渲染到 Surface 中的图像数据。
注释 4 处 收集所有 Surfaces 交给 CameraDevice ,该类是CameraManager调用 openCamera 回调过来的,CameraDevice是连接在安卓设备上的单个相机的抽象表示,具体实现可以看 CameraDeviceImpl 类,音视频相关开发应该很清楚,作为一个渣渣就不介绍了。createCaptureSession 方法的三个参数含义分别为 每个 CaptureRequest 的输出 Surface 集合 、 创建会话的回调、指定回调执行的线程
接下来看 takePicture
方法
camera.takePicture(call.argument("path"), result); //第一个参数 是Flutter端传过来的文件路径 第二个参数传递过来是用来设置回复数据的
注意,这里的camera 对象类型是 io.flutter.plugins.camera.Camera , takePicture 方法 如下
// Saves the next available JPEG frame from pictureImageReader to filePath and
// completes the Dart-side Future through result. (Excerpt — elided in the
// original article.)
public void takePicture(String filePath, @NonNull final Result result) {
  ......
  pictureImageReader.setOnImageAvailableListener(
      reader -> {
        // Grab the latest frame; for JPEG output, plane 0 holds the whole
        // encoded image, which is written straight to disk.
        try (Image image = reader.acquireLatestImage()) {
          ByteBuffer buffer = image.getPlanes()[0].getBuffer();
          writeToFile(buffer, file);
          result.success(null);
  ......
}
然后Flutter端显示的话,直接用 Image.file
最后还有一个东西没有分析,那就是摄像头 在 Flutter 端是 如何显示 的,即示例效果图的上半部分
在示例中,是调用 CameraPreview(controller)
就直接显示出来了
/// Displays the live camera preview by pointing a [Texture] widget at the
/// controller's backend texture id.
class CameraPreview extends StatelessWidget {
  const CameraPreview(this.controller);

  final CameraController controller;

  @override
  Widget build(BuildContext context) {
    if (!controller.value.isInitialized) {
      return Container();
    }
    return Texture(textureId: controller._textureId);
  }
}
关键显示代码 是 Texture(textureId: controller._textureId)
,那这个controller的 _textureId是怎么得到的
/// Asks the platform side to open the camera; the reply carries the id of
/// the SurfaceTexture the preview is rendered into. (Excerpt — elided in the
/// original article.)
Future<void> initialize() async {
  ......
  final Map<String, dynamic> reply =
      await _channel.invokeMapMethod<String, dynamic>(
    'initialize',
    <String, dynamic>{
      'cameraName': description.name,
      'resolutionPreset': serializeResolutionPreset(resolutionPreset),
      'enableAudio': enableAudio,
    },
  );
  // The texture id handed back by the Android/iOS implementation.
  _textureId = reply['textureId'];
  .....
  return _creatingCompleter.future;
}
之前 提到过, textureId 会在相机初始化时从 Android 端 回传给 Flutter 端,那这个 textureId 有什么作用呢?继续看Texture dart 类
/// Framework widget that displays the content of a platform-side texture.
class Texture extends LeafRenderObjectWidget {
  /// Creates a widget backed by the texture identified by [textureId].
  const Texture({
    Key key,
    @required this.textureId,
  }) : assert(textureId != null),
       super(key: key);

  /// The identity of the backend texture.
  final int textureId;

  // A leaf render-object widget: no children, just a paintable box.
  @override
  TextureBox createRenderObject(BuildContext context) => TextureBox(textureId: textureId);

  @override
  void updateRenderObject(BuildContext context, TextureBox renderObject) {
    renderObject.textureId = textureId;
  }
}
Texture
代码很少,它是继承自LeafRenderObjectWidget
的,这里涉及自定义组件的相关知识,可以 简单先了解一下 Flutter中文网自定义组件 ,在构建Texture 时,createRenderObject
是必须会执行的,所以我们看一下返回的 TextureBox
/// Render object that paints a [TextureLayer] referencing a backend texture.
class TextureBox extends RenderBox {
  TextureBox({ @required int textureId })
    : assert(textureId != null),
      _textureId = textureId;

  // Identity of the platform-side texture to display.
  int _textureId;
  set textureId(int value) {
    assert(value != null);
    if (value != _textureId) {
      _textureId = value;
      // Repaint so the layer picks up the new texture id.
      markNeedsPaint();
    }
  }

  @override
  void paint(PaintingContext context, Offset offset) {
    if (_textureId == null)
      return;
    // Record a TextureLayer covering this box's rect; the engine composites
    // the backend texture's content into it.
    context.addLayer(TextureLayer(
      rect: Rect.fromLTWH(offset.dx, offset.dy, size.width, size.height),
      textureId: _textureId,
    ));
  }
}
在paint方法中,利用TextureLayer
进行数据设置并放入PaintingContext
中,其实这里我们或多或少能猜出来, textureId 就是用来给 Flutter 端提供获取 图像视图的 凭证。
2、视频
VideoPlayerController
和 CameraController
类 的作用一样,只不过一个是用来拍照的,一个是用来录制视频的,textureId 也是在 VideoPlayerController 中获取的,都是在 调用 initialize 方法获取的,只不过 VideoPlayerController 中 是通过 _videoPlayerPlatform.create(dataSourceDescription)
获取的,大概代码流程如下
// 1. VideoPlayerController.initialize() obtains the texture id:
_textureId = await _videoPlayerPlatform.create(dataSourceDescription);
// 2. _videoPlayerPlatform is the platform-interface singleton:
final VideoPlayerPlatform _videoPlayerPlatform = VideoPlayerPlatform.instance..init();
// 3. instance -> _instance, the method-channel implementation:
static VideoPlayerPlatform _instance = MethodChannelVideoPlayer();
// 4. Its create() forwards to the pigeon-generated API:
@override
Future<int> create(DataSource dataSource) async {
  .....
  TextureMessage response = await _api.create(message);
  return response.textureId;
}
// 5. _api's type:
VideoPlayerApi _api = VideoPlayerApi();
// 6. _api's create method (listed below):
/// Pigeon-generated call: serializes [arg], sends it over a
/// BasicMessageChannel, and decodes either an error or a TextureMessage.
Future<TextureMessage> create(CreateMessage arg) async {
  const BasicMessageChannel<dynamic> channel = BasicMessageChannel<dynamic>(
      'dev.flutter.pigeon.VideoPlayerApi.create', StandardMessageCodec());
  final Map<dynamic, dynamic> request = arg._toMap();
  final Map<dynamic, dynamic> reply = await channel.send(request);
  // Guard-style handling: no reply means the channel itself failed.
  if (reply == null) {
    throw PlatformException(
        code: 'channel-error',
        message: 'Unable to establish connection on channel.',
        details: null);
  }
  final dynamic error = reply['error'];
  if (error != null) {
    // The platform side reported a structured error; rethrow it as-is.
    throw PlatformException(
        code: error['code'],
        message: error['message'],
        details: error['details']);
  }
  return TextureMessage._fromMap(reply['result']);
}
这里利用了 BasicMessageChannel 进行通信,数据通过 StandardMessageCodec 进行编解码(发送时编码、收到回复时解码)。
大体 textureId
获取流程就是这样,其实和拍照 类似,异曲同工。剩下的这里不分析了,可以去文章参考资料中的插件去下载,看看。
三、Flutter技术解析与实战能力增强部分
这里刚开始准备贴书中内容,后面发现闲鱼技术在知乎上发表了,排版比 CSDN 好,直接贴链接了。
转载自:https://juejin.cn/post/6866815272636907527