AVFoundation - Taking a Photo (Simple)
1: Basics
/*
 1: Get an available capture device: AVCaptureDevice
 2: Create the device input: [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:nil]
 3: Create the output: AVCaptureStillImageOutput, or AVCapturePhotoOutput on iOS 10 and later
 4: Create the session: initialize AVCaptureSession, set its preset, and add the input and output
 5: Preview layer: AVCaptureVideoPreviewLayer, + (instancetype)layerWithSessionWithNoConnection:(AVCaptureSession *)session
 6: Get the photo: AVCapturePhotoOutput delivers the image through its delegate, AVCaptureStillImageOutput through a completion block. https://blog.csdn.net/vkooy/article/details/60867268

 Notes:
 1) Switching between the front and back cameras. Switching simply would not work, and I spent a long time trying to find the reason. It turned out I had set the preset to 1080p with [self.session canSetSessionPreset:AVCaptureSessionPreset1920x1080]; the front camera does not support a resolution that high, so switching to it failed. In my tests the front camera topped out at 720p, and at 720p or below switching worked freely. You can also choose a different preset for each camera when switching (see the sketch after FF_ChangeCaptureDevice in the code below); I won't go into that here.

 2) Focus point: CGPoint focusPoint = CGPointMake(point.y / size.height, 1 - point.x / size.width);
 The point passed to setExposurePointOfInterest: ranges from (0,0) at the top-left corner of the viewfinder to (1,1) at the bottom-right. The official documentation says:
 "The value of this property is a CGPoint that determines the receiver's focus point of interest, if it has one. A value of (0,0) indicates that the camera should focus on the top left corner of the image, while a value of (1,1) indicates that it should focus on the bottom right. The default value is (0.5,0.5)."
 I tried following that directly, but the position was always off; only the conversion above worked for me: the x component is the tap's y divided by the preview layer's height, and the y component is 1 minus the tap's x divided by the preview layer's width. (A snippet right after these notes shows the conversion AVCaptureVideoPreviewLayer can do for you.)

 3) Focus and exposure. I originally set the mode with setFocusMode first and the focus point second, which produced an odd effect: focus landed on the previously tapped position. Always set the point of interest first and the focus mode second. The same applies to exposure.
 */
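
As an alternative to the manual conversion in note 2, AVCaptureVideoPreviewLayer can map a tap point in layer coordinates to a device point of interest for you; it accounts for the layer's videoGravity and mirroring. A minimal sketch of such a helper (the method name FF_FocusAtLayerPoint: is my own; it assumes the previewLayer and captureDevice properties from the class below):

- (void)FF_FocusAtLayerPoint:(CGPoint)layerPoint {
    // Convert from preview-layer coordinates to the (0,0)-(1,1) point-of-interest space
    CGPoint poi = [self.previewLayer captureDevicePointOfInterestForPoint:layerPoint];
    NSError *error = nil;
    if ([self.captureDevice lockForConfiguration:&error]) {
        if (self.captureDevice.isFocusPointOfInterestSupported &&
            [self.captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
            self.captureDevice.focusPointOfInterest = poi;              // point first...
            self.captureDevice.focusMode = AVCaptureFocusModeAutoFocus; // ...then mode
        }
        [self.captureDevice unlockForConfiguration];
    }
}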
2: Code
#import "FFCameraHelper.h"
#define FFCameraHelperScreenWidth ([UIScreen mainScreen].bounds.size.width)
#define FFCameraHelperScreenHeight ([UIScreen mainScreen].bounds.size.height)
#define FFCameraHelperFocusLayerWidth (80)

@interface FFCameraHelper () <AVCapturePhotoCaptureDelegate>
/// Available capture devices
@property (nonatomic, strong) NSArray<AVCaptureDevice *> *captureDeviceList;
/// The device currently being captured from
@property (nonatomic, strong) AVCaptureDevice *captureDevice;
/// Device input
@property (nonatomic, strong) AVCaptureDeviceInput *captureInput;
/// Photo output (iOS 10.0 and later)
@property (nonatomic, strong) AVCapturePhotoOutput *photoOutput;
/// Metadata output
@property (nonatomic, strong) AVCaptureMetadataOutput *metaDataOutput;
/// Photo output (below iOS 10.0)
@property (nonatomic, strong) AVCaptureStillImageOutput *imageOutput;
/// Session: ties the input and output together and drives the device
@property (nonatomic, strong) AVCaptureSession *captureSession;
/// Focus indicator layer
@property (nonatomic, strong) CAShapeLayer *focusLayer;

@end

@implementation FFCameraHelper
- (instancetype)init
{
self = [super init];
if (self) {
[self FF_ReqeuestAuthortion];
/// Only continue if camera access is already authorized
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status == AVAuthorizationStatusAuthorized) {
[self FF_initSetUp];
}
}
return self;
}

/// Request camera authorization
- (void)FF_ReqeuestAuthortion {
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status == AVAuthorizationStatusNotDetermined) {
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
// Finish setup on the main queue once the user grants access
if (granted) { dispatch_async(dispatch_get_main_queue(), ^{ [self FF_initSetUp]; }); }
}];
}
}

- (void)FF_initSetUp {
// 1: Select the back camera
[self FF_AcquireNeedDevice:AVCaptureDevicePositionBack];
// 2: Create the device input
self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:nil];
// 3: Create the output
if (@available(iOS 10.0, *)) {
self.photoOutput = [AVCapturePhotoOutput new];
}else {
self.imageOutput = [AVCaptureStillImageOutput new];
}
// 4: Configure the session
self.captureSession = [[AVCaptureSession alloc] init];
if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
[self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];
}else if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetMedium]) {
[self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];
}
if ([self.captureSession canAddInput:self.captureInput]) {
[self.captureSession addInput:self.captureInput];
}
if (@available(iOS 10.0, *)) {
if ([self.captureSession canAddOutput:self.photoOutput]) {
[self.captureSession addOutput:self.photoOutput];
}
}else if ([self.captureSession canAddOutput:self.imageOutput]) {
[self.captureSession addOutput:self.imageOutput];
}
// 5: layer
self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
self.previewLayer.frame = CGRectMake(0, 0, FFCameraHelperScreenWidth, FFCameraHelperScreenHeight);
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}

/**
 Switch between the front and back cameras
*/
- (void)FF_ChangeCaptureDevice {
[self FF_PreviewLayerTransform:self.captureDevice.position == AVCaptureDevicePositionBack];
if (self.captureDevice == nil) {
}else if (self.captureDevice.position == AVCaptureDevicePositionBack) {
[self FF_AcquireNeedDevice:AVCaptureDevicePositionFront];
}else {
[self FF_AcquireNeedDevice:AVCaptureDevicePositionBack];
}
[self.captureSession beginConfiguration];
[self.captureSession removeInput:self.captureInput];
self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:nil];
if ([self.captureSession canAddInput:self.captureInput]) {
[self.captureSession addInput:self.captureInput];
}
[self.captureSession commitConfiguration];
}
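
/**
 Sketch (not in the original post): note 1 above suggests picking a preset per camera,
 since the front camera may not support 1080p. The method name FF_PreferredPresetForPosition:
 is hypothetical; FF_ChangeCaptureDevice could call
 [self.captureSession setSessionPreset:[self FF_PreferredPresetForPosition:position]]
 between beginConfiguration and commitConfiguration.
 */
- (NSString *)FF_PreferredPresetForPosition:(AVCaptureDevicePosition)position {
    NSString *preferred = (position == AVCaptureDevicePositionFront)
        ? AVCaptureSessionPreset1280x720    // front camera: 720p was the highest that worked in my tests
        : AVCaptureSessionPreset1920x1080;  // back camera: 1080p
    if ([self.captureSession canSetSessionPreset:preferred]) {
        return preferred;
    }
    return AVCaptureSessionPresetHigh; // fall back to a preset every device supports
}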

/// Flip animation when switching between the front and back cameras
- (void)FF_PreviewLayerTransform:(BOOL)isBack {
// Transition types provided by the system (type):
// kCATransitionFade    - fade
// kCATransitionMoveIn  - new content moves in over the old
// kCATransitionPush    - push
// kCATransitionReveal  - old content slides away to reveal the new
// The following effects are undocumented and must be set as strings, i.e. [transition setType:@"..."]:
// pageCurl     - page curls up
// pageUnCurl   - page curls down
// rippleEffect - water ripple
// suckEffect   - suction, like cloth being pulled away
// cube         - cube
// oglFlip      - flip (used below)
[self.previewLayer removeAllAnimations];
CATransition *transition = [CATransition animation];
[transition setType:@"oglFlip"];
transition.subtype = isBack ? kCATransitionFromRight : kCATransitionFromLeft;
transition.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
transition.duration = 0.27;
[self.previewLayer addAnimation:transition forKey:@"flip"];
}

/**
 Start capturing
*/
- (void)FF_StartCapture {
[self.captureSession startRunning];
}

- (void)FF_AcquireNeedDevice:(AVCaptureDevicePosition)position {
self.captureDevice = nil;
if (@available(iOS 10.0, *)) {
self.captureDeviceList = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position].devices;
for (AVCaptureDevice *device in self.captureDeviceList) {
if (device.position == position) {
self.captureDevice = device;
return;
}
}
}else {
self.captureDeviceList = [AVCaptureDevice devices];
for (AVCaptureDevice *device in self.captureDeviceList) {
if (device.position == position) {
self.captureDevice = device;
return;
}
}
}
}

/// Take a photo
- (void)FF_AcquireNeedImage {
AVCaptureConnection *connection = nil;
if (@available(iOS 10.0, *)) {
connection = [self.photoOutput connectionWithMediaType:AVMediaTypeVideo];
}else {
connection = [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
}
if (!connection) {
NSLog(@"Failed to take photo: no capture connection");
return;
}
if (@available(iOS 10.0, *)) {
[self.photoOutput capturePhotoWithSettings:[AVCapturePhotoSettings photoSettings] delegate:self];
}else {
[self.imageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef _Nullable imageDataSampleBuffer, NSError * _Nullable error) {
// The capture is asynchronous, so deliver the image from inside the completion handler
if (imageDataSampleBuffer != nil) {
NSData *data = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *img = [UIImage imageWithData:data];
if (self.delegate && [self.delegate respondsToSelector:@selector(FF_CaptureImage:)]) {
[self.delegate FF_CaptureImage:img];
}
}
}];
}
}

/**
 Photo capture delegate callback (iOS 11.0 and later)
*/
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(nullable NSError *)error {
if (error == nil) {
NSData *data = [photo fileDataRepresentation];
UIImage *img = [UIImage imageWithData:data];
if (self.delegate && [self.delegate respondsToSelector:@selector(FF_CaptureImage:)]) {
[self.delegate FF_CaptureImage:img];
}
}
}
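
/**
 Sketch (not in the original post): on iOS 10 and later the flash is requested per capture
 through AVCapturePhotoSettings instead of AVCaptureDevice.flashMode, which is why the
 iOS 10 branch in FF_SetupFocusAndWhiteBalance below is empty. The method name
 FF_AcquireNeedImageWithFlash is hypothetical.
 */
- (void)FF_AcquireNeedImageWithFlash {
    if (@available(iOS 10.0, *)) {
        AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
        if ([self.photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]) {
            settings.flashMode = AVCaptureFlashModeAuto;
        }
        [self.photoOutput capturePhotoWithSettings:settings delegate:self];
    }
}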
/**
 Photo capture delegate callback (iOS 10.0; deprecated in iOS 11.0)
*/
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhotoSampleBuffer:(nullable CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error {
if (error) {
NSLog(@"%@", error.localizedDescription);
}else {
NSData *data = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];
UIImage *img = [UIImage imageWithData:data];
if (self.delegate && [self.delegate respondsToSelector:@selector(FF_CaptureImage:)]) {
[self.delegate FF_CaptureImage:img];
}
}
}

/**
 Set exposure and auto focus (still somewhat problematic)

 @param point the point to focus on
*/
- (void)FF_SetupFocusAndWhiteBalance:(CGPoint)point {
NSError *error = nil;
[self.captureDevice lockForConfiguration:&error];
if (error) {
NSLog(@"%@", error.localizedDescription);
return;
}
/// White balance
if ([self.captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance]) {
self.captureDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeAutoWhiteBalance;
}
/// Flash
if (@available(iOS 10.0, *)) {
// On iOS 10 and later the flash is requested per capture through AVCapturePhotoSettings (see the sketch above)
}else {
if ([self.captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
self.captureDevice.flashMode = AVCaptureFlashModeAuto;
}
}
}
/// Focus: set the point of interest first, then the mode (see note 3 above)
if ([self.captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
self.captureDevice.focusPointOfInterest = CGPointMake(point.y / FFCameraHelperScreenHeight, 1 - point.x / FFCameraHelperScreenWidth);
self.captureDevice.focusMode = AVCaptureFocusModeAutoFocus;
}
/// Exposure
if ([self.captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
self.captureDevice.exposurePointOfInterest = CGPointMake(point.y / FFCameraHelperScreenHeight, 1 - point.x / FFCameraHelperScreenWidth);
self.captureDevice.exposureMode = AVCaptureExposureModeAutoExpose;
}
[self.captureDevice unlockForConfiguration];
/// Focus animation (commented out in the original; a CABasicAnimation sketch follows this method)
// self.focusLayer.hidden = NO;
// [UIView animateWithDuration:3 animations:^{
// UIBezierPath *path = [UIBezierPath bezierPathWithRect:CGRectMake(point.x - 60, point.y - 60, 120, 120)];
// self.focusLayer.path = path.CGPath;
// } completion:^(BOOL finished) {
// UIBezierPath *path = [UIBezierPath bezierPathWithRect:CGRectMake(point.x - 40, point.y - 40, 80, 80)];
// self.focusLayer.path = path.CGPath;
// }];
// [UIView animateWithDuration:1 animations:^{
// self.focusLayer.transform = CATransform3DMakeScale(1.25, 1.25, 1.0);
// } completion:^(BOOL finished) {
// [UIView animateWithDuration:1 animations:^{
// self.focusLayer.transform = CATransform3DIdentity;
// self.focusLayer.hidden = YES;
// [self.focusLayer removeFromSuperlayer];
// }];
// }];
}
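
/**
 Sketch (not in the original post): UIView animations do not animate CAShapeLayer.path,
 which is likely why the block above is commented out. An explicit CABasicAnimation on the
 "path" key works; the 120pt -> 80pt sizes mirror the commented-out attempt. The method
 name FF_AnimateFocusLayerAtPoint: is hypothetical.
 */
- (void)FF_AnimateFocusLayerAtPoint:(CGPoint)point {
    UIBezierPath *fromPath = [UIBezierPath bezierPathWithRect:CGRectMake(point.x - 60, point.y - 60, 120, 120)];
    UIBezierPath *toPath   = [UIBezierPath bezierPathWithRect:CGRectMake(point.x - 40, point.y - 40, 80, 80)];
    CABasicAnimation *shrink = [CABasicAnimation animationWithKeyPath:@"path"];
    shrink.fromValue = (__bridge id)fromPath.CGPath;
    shrink.toValue = (__bridge id)toPath.CGPath;
    shrink.duration = 0.3;
    self.focusLayer.path = toPath.CGPath; // set the final model value
    [self.focusLayer addAnimation:shrink forKey:@"focusShrink"];
}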

/**
 Focus indicator box at the tap point
 */
- (void)FF_SetUpFocusLayerWithPoint:(CGPoint)point {
if (self.focusLayer == nil) {
self.focusLayer = [CAShapeLayer layer];
self.focusLayer.strokeColor = [UIColor lightGrayColor].CGColor;
self.focusLayer.fillColor = [UIColor clearColor].CGColor;
self.focusLayer.lineDashPhase = 0; // original value lost in formatting; 0 assumed
self.focusLayer.lineWidth = 1;     // original value lost in formatting; 1pt assumed
self.focusLayer.lineCap = kCALineCapRound;
}else {
[self.focusLayer removeFromSuperlayer];
}
CGFloat radiu = FFCameraHelperFocusLayerWidth / 2;
// Clamp the point so the focus box stays on screen
// (the original also applied a small vertical inset whose value was lost)
if (point.x - radiu < 0) {
point.x = radiu;
}else if (point.x + radiu > FFCameraHelperScreenWidth) {
point.x = FFCameraHelperScreenWidth - radiu;
}
if (point.y - radiu < 0) {
point.y = radiu;
}else if (point.y + radiu > FFCameraHelperScreenHeight) {
point.y = FFCameraHelperScreenHeight - radiu;
}
UIBezierPath *path = [UIBezierPath bezierPathWithRect:CGRectMake(point.x - radiu, point.y - radiu, radiu * 2, radiu * 2)];
self.focusLayer.path = path.CGPath;
[self.previewLayer addSublayer:self.focusLayer];
[self FF_SetupFocusAndWhiteBalance:point];
}

@end
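
A minimal usage sketch (not from the original post): a view controller that owns an FFCameraHelper. The helper's header is not shown above, so the delegate protocol name FFCameraHelperDelegate and the exact property declarations are assumptions; previewLayer, delegate, and the FF_ methods are the ones used in the implementation.

#import <UIKit/UIKit.h>
#import "FFCameraHelper.h"

// Delegate protocol name assumed; adjust to whatever FFCameraHelper.h declares.
@interface FFCameraViewController : UIViewController <FFCameraHelperDelegate>
@property (nonatomic, strong) FFCameraHelper *cameraHelper;
@end

@implementation FFCameraViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.cameraHelper = [[FFCameraHelper alloc] init];
    self.cameraHelper.delegate = self;
    [self.view.layer addSublayer:self.cameraHelper.previewLayer];
    [self.cameraHelper FF_StartCapture];

    // Tap anywhere on the preview to focus there
    UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self
                                                                          action:@selector(handleTap:)];
    [self.view addGestureRecognizer:tap];
}

- (void)handleTap:(UITapGestureRecognizer *)tap {
    [self.cameraHelper FF_SetUpFocusLayerWithPoint:[tap locationInView:self.view]];
}

- (void)takePhoto {
    [self.cameraHelper FF_AcquireNeedImage]; // hook this to a shutter button
}

/// FFCameraHelper delegate callback with the captured image
- (void)FF_CaptureImage:(UIImage *)image {
    // Display or save the photo here
}

@end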