iOS使用视听媒体框架AVFoundation实现照片拍摄

用系统自带的视听媒体框架 AVFoundation 实现照片拍摄。相比 UIKit 框架(UIImagePickerController 高度封装),AVFoundation 框架让开发者有更大的发挥空间。

首先看一下效果图:

下面贴上核心控制器代码:

#import "HWPhotoVC.h"
#import <AVFoundation/AVFoundation.h>

@interface HWPhotoVC ()

@property (nonatomic, strong) AVCaptureSession *captureSession;//coordinates the flow of data between the capture input and output
@property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput;//supplies input data captured from the AVCaptureDevice (camera)
@property (nonatomic, strong) AVCaptureStillImageOutput *captureStillImageOutput;//still-photo output stream — NOTE(review): deprecated since iOS 10, AVCapturePhotoOutput is the successor
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;//layer rendering the live camera preview
@property (nonatomic, weak) UIView *containerView;//hosts the preview layer and focus cursor
@property (nonatomic, weak) UIImageView *focusCursor;//focus-cursor image flashed where the user taps
@property (nonatomic, weak) UIImageView *imgView;//displays the captured photo (covers the preview)

@end

@implementation HWPhotoVC

- (void)viewDidLoad {
 [super viewDidLoad];

 // Basic appearance.
 self.view.backgroundColor = [UIColor whiteColor];
 self.navigationItem.title = @"拍照";

 // Build all UI controls (preview container, buttons, focus cursor).
 [self creatControl];
}

- (void)viewWillAppear:(BOOL)animated
{
 [super viewWillAppear:animated];

 // (Re)build the capture pipeline each time the view is about to appear.
 [self initPhotoInfo];
}

- (void)viewDidAppear:(BOOL)animated
{
 [super viewDidAppear:animated];

 // Start streaming frames once the view is on screen.
 [_captureSession startRunning];
}

- (void)viewDidDisappear:(BOOL)animated
{
 [super viewDidDisappear:animated];

 // Stop the camera as soon as the view goes off screen.
 [_captureSession stopRunning];
}

// Builds the whole UI: preview container, camera-switch button, focus
// cursor, captured-photo view, and the take/retake buttons.
// (Name kept as-is — called from viewDidLoad.)
- (void)creatControl
{
 CGFloat btnW = 150.f;
 CGFloat btnH = 40.f;
 CGFloat marginY = 20.f;
 CGFloat w = [UIScreen mainScreen].bounds.size.width;
 CGFloat h = [UIScreen mainScreen].bounds.size.height;

 // Container that hosts the live camera preview.
 CGFloat containerViewH = h - 64 - btnH - marginY * 3;
 UIView *containerView = [[UIView alloc] initWithFrame:CGRectMake(10, 64 + marginY, w - 20, containerViewH)];
 containerView.backgroundColor = [UIColor whiteColor];
 containerView.layer.borderWidth = 1.f;
 containerView.layer.borderColor = [[UIColor grayColor] CGColor];
 [self.view addSubview:containerView];
 _containerView = containerView;

 // Front/back camera switch button.
 // FIX: the button lives in self.view, so its x must come from the
 // container's frame (superview coordinates); the original used the
 // container's bounds width, placing the button 10 pt too far left.
 CGFloat cameraSwitchBtnW = 50.f;
 CGFloat cameraSwitchBtnMargin = 10.f;
 UIButton *cameraSwitchBtn = [[UIButton alloc] initWithFrame:CGRectMake(CGRectGetMaxX(containerView.frame) - cameraSwitchBtnW - cameraSwitchBtnMargin, CGRectGetMinY(containerView.frame) + cameraSwitchBtnMargin, cameraSwitchBtnW, cameraSwitchBtnW)];
 [cameraSwitchBtn setImage:[UIImage imageNamed:@"camera_switch"] forState:UIControlStateNormal];
 [cameraSwitchBtn addTarget:self action:@selector(cameraSwitchBtnOnClick) forControlEvents:UIControlEventTouchUpInside];
 [self.view addSubview:cameraSwitchBtn];

 // Focus cursor, flashed briefly where the user taps.
 UIImageView *focusCursor = [[UIImageView alloc] initWithFrame:CGRectMake(50, 50, 75, 75)];
 focusCursor.alpha = 0;
 focusCursor.image = [UIImage imageNamed:@"camera_focus_red"];
 [containerView addSubview:focusCursor];
 _focusCursor = focusCursor;

 // Image view that displays the captured photo; covers the preview area.
 UIImageView *imgView = [[UIImageView alloc] initWithFrame:containerView.frame];
 imgView.hidden = YES;
 imgView.layer.borderWidth = 1.f;
 imgView.layer.borderColor = [[UIColor grayColor] CGColor];
 imgView.contentMode = UIViewContentModeScaleAspectFill;
 imgView.clipsToBounds = YES;
 [self.view addSubview:imgView];
 _imgView = imgView;

 // "Take photo" (tag 1000) and "retake" (tag 1001) buttons, evenly spaced.
 NSArray *titleArray = @[@"拍摄照片", @"重新拍摄"];
 CGFloat btnY = CGRectGetMaxY(containerView.frame) + marginY;
 CGFloat margin = (w - btnW * titleArray.count) / (titleArray.count + 1);
 for (int i = 0; i < titleArray.count; i++) {
  CGFloat btnX = margin + (margin + btnW) * i;
  UIButton *btn = [[UIButton alloc] initWithFrame:CGRectMake(btnX, btnY, btnW, btnH)];
  btn.tag = 1000 + i;
  [btn setTitle:titleArray[i] forState:UIControlStateNormal];
  btn.backgroundColor = [UIColor orangeColor];
  btn.layer.cornerRadius = 2.0f;
  btn.layer.masksToBounds = YES;
  // The "retake" button stays hidden until a photo has been taken.
  if (i == 1) {
   btn.hidden = YES;
  }
  [btn addTarget:self action:@selector(btnOnClick:) forControlEvents:UIControlEventTouchUpInside];
  [self.view addSubview:btn];
 }
}

// Builds the capture pipeline: session -> input (rear camera) ->
// still-image output, plus a preview layer inside the container view.
// NOTE(review): called from viewWillAppear:, so the whole pipeline is
// rebuilt every time the view re-appears.
- (void)initPhotoInfo
{
 // Create the session that shuttles data from input to output.
 _captureSession = [[AVCaptureSession alloc] init];

 // Use 720p when the hardware supports it.
 if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
  _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
 }

 // Start with the rear camera.
 AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
 if (!captureDevice) {
  NSLog(@"取得后置摄像头时出现问题");
  return;
 }

 NSError *error = nil;
 // Wrap the device in an input object the session can consume.
 _captureDeviceInput = [[AVCaptureDeviceInput alloc]initWithDevice:captureDevice error:&error];
 if (error) {
  NSLog(@"取得设备输入对象时出错,错误原因:%@", error.localizedDescription);
  return;
 }

 // Still-image output producing JPEG frames.
 // NOTE(review): AVCaptureStillImageOutput is deprecated since iOS 10;
 // AVCapturePhotoOutput is the modern replacement.
 _captureStillImageOutput = [[AVCaptureStillImageOutput alloc] init];
 NSDictionary *outputSettings = @{AVVideoCodecKey:AVVideoCodecJPEG};
 [_captureStillImageOutput setOutputSettings:outputSettings];

 // Wire input into the session (only if the session accepts it).
 if ([_captureSession canAddInput:_captureDeviceInput]) {
  [_captureSession addInput:_captureDeviceInput];
 }

 // Wire output into the session.
 if ([_captureSession canAddOutput:_captureStillImageOutput]) {
  [_captureSession addOutput:_captureStillImageOutput];
 }

 // Preview layer that renders the live camera feed.
 _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];

 // Lock the preview to portrait orientation.
 AVCaptureConnection *captureConnection = [self.captureVideoPreviewLayer connection];
 captureConnection.videoOrientation = AVCaptureVideoOrientationPortrait;

 CALayer *layer = _containerView.layer;
 layer.masksToBounds = YES;

 _captureVideoPreviewLayer.frame = layer.bounds;
 // Fill the container, cropping when aspect ratios differ.
 _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
 // Insert below the focus cursor so the cursor stays visible on top.
 [layer insertSublayer:_captureVideoPreviewLayer below:self.focusCursor.layer];

 [self addNotificationToCaptureDevice:captureDevice];
 [self addGenstureRecognizer];
}

// Dispatches taps on the bottom buttons by tag.
- (void)btnOnClick:(UIButton *)btn
{
 switch (btn.tag) {
  case 1000:
   // Take a photo.
   [self photoBtnOnClick];
   break;
  case 1001:
   // Retake: discard the captured photo.
   [self resetPhoto];
   break;
  default:
   break;
 }
}

#pragma mark - Taking a photo
// Captures a single still frame and shows it in the image view.
- (void)photoBtnOnClick
{
 // Grab the video connection backing the still-image output.
 AVCaptureConnection *captureConnection = [self.captureStillImageOutput connectionWithMediaType:AVMediaTypeVideo];
 captureConnection.videoOrientation = AVCaptureVideoOrientationPortrait;

 // FIX: the completion handler is not guaranteed to run on the main
 // queue, so all UI mutation is dispatched back to it; errors are now
 // logged, and the "retake" button only appears after a successful shot
 // (the original revealed it unconditionally).
 [self.captureStillImageOutput captureStillImageAsynchronouslyFromConnection:captureConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
  if (!imageDataSampleBuffer) {
   NSLog(@"拍摄照片出错,错误原因:%@", error.localizedDescription);
   return;
  }
  NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
  UIImage *image = [UIImage imageWithData:imageData];
  dispatch_async(dispatch_get_main_queue(), ^{
   self.imgView.image = image;
   self.imgView.hidden = NO;
   UIButton *btn = (UIButton *)[self.view viewWithTag:1001];
   btn.hidden = NO;
  });
 }];
}

// Retake: hide the captured photo and the "retake" button, revealing the
// live preview underneath again.
- (void)resetPhoto
{
 UIButton *retakeBtn = (UIButton *)[self.view viewWithTag:1001];
 retakeBtn.hidden = YES;
 self.imgView.hidden = YES;
}

#pragma mark - Notifications
// Registers for subject-area-change notifications from the given device.
- (void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice
{
 // The device must be told to monitor subject-area changes before the
 // corresponding notification can ever fire.
 [self changeDeviceProperty:^(AVCaptureDevice *device) {
  device.subjectAreaChangeMonitoringEnabled = YES;
 }];

 NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
 [center addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

// Stops observing subject-area changes for the given device (used when
// switching cameras).
- (void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice
{
 NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
 [center removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

// Unregisters this controller from every notification it observes.
- (void)removeNotification
{
 NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
 [center removeObserver:self];
}

// Device-connected handler.
// NOTE(review): no observer for AVCaptureDeviceWasConnectedNotification is
// registered anywhere in this file, so as written this is never called.
- (void)deviceConnected:(NSNotification *)notification
{
 NSLog(@"设备已连接...");
}

// Device-disconnected handler.
// NOTE(review): like deviceConnected:, never registered — dead code unless
// an AVCaptureDeviceWasDisconnectedNotification observer is added.
- (void)deviceDisconnected:(NSNotification *)notification
{
 NSLog(@"设备已断开.");
}

// Subject area in front of the camera changed (scene moved/lighting shifted);
// registered per-device in addNotificationToCaptureDevice:.
- (void)areaChange:(NSNotification *)notification
{
 NSLog(@"捕获区域改变...");
}

#pragma mark - Private helpers
// Returns the first video-capture device mounted at the requested position
// (front or back), or nil when no such camera exists.
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position
{
 for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
  if (device.position == position) {
   return device;
  }
 }

 return nil;
}

#pragma mark - Switching between front and back camera
// Toggles the session's input between the front and back cameras.
// FIX: the original removed notifications / the current input before
// verifying the target camera exists and ignored the input-creation error,
// which could leave the session with no camera at all (e.g. hardware with a
// single camera). All validation now happens before any teardown, and the
// previous input is restored if the new one is rejected.
- (void)cameraSwitchBtnOnClick
{
 AVCaptureDevice *currentDevice = [self.captureDeviceInput device];
 AVCaptureDevicePosition currentPosition = [currentDevice position];

 // Decide which camera to switch to.
 AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront;
 if (currentPosition == AVCaptureDevicePositionUnspecified || currentPosition == AVCaptureDevicePositionFront) {
  toChangePosition = AVCaptureDevicePositionBack;
 }
 AVCaptureDevice *toChangeDevice = [self getCameraDeviceWithPosition:toChangePosition];
 if (!toChangeDevice) {
  NSLog(@"取得摄像头时出现问题");
  return;
 }

 NSError *error = nil;
 AVCaptureDeviceInput *toChangeDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toChangeDevice error:&error];
 if (!toChangeDeviceInput) {
  NSLog(@"取得设备输入对象时出错,错误原因:%@", error.localizedDescription);
  return;
 }

 // Move subject-area notifications to the new device.
 [self removeNotificationFromCaptureDevice:currentDevice];
 [self addNotificationToCaptureDevice:toChangeDevice];

 // Reconfigure the session atomically: begin/commitConfiguration brackets
 // the input swap.
 [self.captureSession beginConfiguration];
 [self.captureSession removeInput:self.captureDeviceInput];
 if ([self.captureSession canAddInput:toChangeDeviceInput]) {
  [self.captureSession addInput:toChangeDeviceInput];
  self.captureDeviceInput = toChangeDeviceInput;
 } else {
  // Restore the previous camera so the session is never left inputless.
  [self.captureSession addInput:self.captureDeviceInput];
 }
 [self.captureSession commitConfiguration];
}

// Central helper for mutating properties on the current capture device.
// The device must be locked via lockForConfiguration: before any property
// changes and unlocked immediately afterwards.
// FIX: a nil block would have crashed at the call site; guard added.
// @param propertyChange block that performs the actual property mutation,
//        invoked with the (already locked) current capture device.
- (void)changeDeviceProperty:(void (^)(AVCaptureDevice *))propertyChange
{
 if (!propertyChange) {
  return;
 }

 AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
 NSError *error = nil;
 if ([captureDevice lockForConfiguration:&error]) {
  propertyChange(captureDevice);
  [captureDevice unlockForConfiguration];
 } else {
  NSLog(@"设置设备属性过程发生错误,错误信息:%@", error.localizedDescription);
 }
}

// Applies the given flash mode when the current camera supports it.
- (void)setFlashMode:(AVCaptureFlashMode)flashMode
{
 [self changeDeviceProperty:^(AVCaptureDevice *device) {
  if ([device isFlashModeSupported:flashMode]) {
   device.flashMode = flashMode;
  }
 }];
}

// Applies the given focus mode when the current camera supports it.
- (void)setFocusMode:(AVCaptureFocusMode)focusMode
{
 [self changeDeviceProperty:^(AVCaptureDevice *device) {
  if ([device isFocusModeSupported:focusMode]) {
   device.focusMode = focusMode;
  }
 }];
}

// Applies the given exposure mode when the current camera supports it.
- (void)setExposureMode:(AVCaptureExposureMode)exposureMode
{
 [self changeDeviceProperty:^(AVCaptureDevice *device) {
  if ([device isExposureModeSupported:exposureMode]) {
   device.exposureMode = exposureMode;
  }
 }];
}

// Sets the focus/exposure point of interest and the requested modes.
// FIX: the original checked support for the passed-in modes but then
// hard-coded AVCaptureFocusModeAutoFocus / AVCaptureExposureModeAutoExpose,
// silently ignoring both parameters. Also, per Apple's documentation the
// point of interest must be set BEFORE the mode for it to take effect.
// @param point point in the capture device's coordinate space (0..1, 0..1).
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point
{
 [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
  if ([captureDevice isFocusPointOfInterestSupported]) {
   [captureDevice setFocusPointOfInterest:point];
  }
  if ([captureDevice isFocusModeSupported:focusMode]) {
   [captureDevice setFocusMode:focusMode];
  }
  if ([captureDevice isExposurePointOfInterestSupported]) {
   [captureDevice setExposurePointOfInterest:point];
  }
  if ([captureDevice isExposureModeSupported:exposureMode]) {
   [captureDevice setExposureMode:exposureMode];
  }
 }];
}

// Adds a tap recognizer to the preview container: tap to focus/expose there.
// (Name typo kept as-is — called from initPhotoInfo.)
- (void)addGenstureRecognizer
{
 UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapScreen:)];
 [self.containerView addGestureRecognizer:tap];
}

// Tap handler: flash the focus cursor at the tapped view point and refocus
// the camera at the equivalent device point.
- (void)tapScreen:(UITapGestureRecognizer *)tapGesture
{
 CGPoint uiPoint = [tapGesture locationInView:self.containerView];
 // The cursor uses UI coordinates...
 [self setFocusCursorWithPoint:uiPoint];
 // ...while the hardware wants the capture device's coordinate space.
 CGPoint devicePoint = [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:uiPoint];
 [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:devicePoint];
}

// Flashes the focus cursor at the given point: pop in enlarged, shrink back
// to normal size over one second, then hide.
- (void)setFocusCursorWithPoint:(CGPoint)point
{
 UIImageView *cursor = self.focusCursor;
 cursor.center = point;
 cursor.transform = CGAffineTransformMakeScale(1.5, 1.5);
 cursor.alpha = 1.0;
 [UIView animateWithDuration:1.0 animations:^{
  cursor.transform = CGAffineTransformIdentity;
 } completion:^(BOOL finished) {
  cursor.alpha = 0;
 }];
}

// Stop observing all notifications before the controller is destroyed.
- (void)dealloc
{
 [self removeNotification];
}

@end

Demo下载链接

以上就是本文的全部内容,希望对大家的学习有所帮助,也希望大家多多支持我们。

(0)

相关推荐

  • iOS实现微信/QQ显示最近拍摄图片的功能实例代码

    如果你刚刚拍摄了图片,在使用微信/QQ发生消息时会显示"你可能要发送的图片", 实现原理: 1.打开或重新进入聊天窗口时查询图库最新的照片, 对比拍照时间和当前时间的差,当低于阈值(例如一分钟)时就显示出来. PS:阈值是逻辑上判断是否最近的依据.优点:总能找到最近拍摄的图片: 缺点:每次都要查询图片数据,响应较慢. 2.注册图库变化监听(观察者模式), 响应图库的增删改事件, 拿到变化图片数据后做对应的逻辑. 优点: 实时响应: 缺点:影响性能, 在注册监听前拿不到变化数据. 实现方

  • IOS获取系统相册中照片的示例代码

    先来看看效果图 下面话不多少,我们直接上代码: #import "ViewController.h" @interface ViewController ()<UINavigationControllerDelegate,UIImagePickerControllerDelegate> @property (weak, nonatomic) IBOutlet UIImageView *IconView; @end @implementation ViewController

  • iOS开发-调用系统相机和相册获取照片示例

    前言:相信大家都知道大部分的app都是有我的模块的,而在我的模块基本都有用户的头像等信息,并且是可以更改头像的.那么今天小编给大家简单介绍一下iOS开发中如何调用系统相机拍照或者相册获取照片.要获取系统相机或者相册,我们需要使用到 UIImagePickerController 这个类.下面我们来看一下如何实现: 首先,需要遵循 UIImagePickerController 代理的两个协议: <UIImagePickerControllerDelegate, UINavigationContr

  • IOS 照片操作(获取信息及修改照片)详解

    IOS 照片操作(获取信息及修改照片) 最近客户想要一个照片水印,并修改其他内容包括获取位置,和修改事件等,哦,可以的呢,我想起了uiview变UIimage 的方法可以的 <span style="font-size:14px;">- (UIImage*)convertViewToImage:(UIView*)v { CGSize s = v.bounds.size; UIGraphicsBeginImageContextWithOptions(s, NO, [UIScr

  • iOS 通过collectionView实现照片删除功能

    一,效果图. 二,工程图. 三,代码. ViewController.h #import <UIKit/UIKit.h> @interface ViewController : UIViewController <UICollectionViewDataSource,UICollectionViewDelegate,UICollectionViewDelegateFlowLayout,UIAlertViewDelegate,UIActionSheetDelegate,UIImagePic

  • iOS使用视听媒体框架AVFoundation实现照片拍摄

    用系统自带的视听媒体的框架,AVFoundation实现照片拍摄.相比UIKit框架(UIImagePickerController高度封装),AVFoundation框架让开发者有更大的发挥空间. 首先看一下效果图: 下面贴上核心控制器代码: #import "HWPhotoVC.h" #import <AVFoundation/AVFoundation.h> @interface HWPhotoVC () @property (nonatomic, strong) AV

  • iOS框架AVFoundation实现相机拍照、录制视频

    本文实例为大家分享了使用AVFoundation框架实现相机拍照.录制视频的具体代码,供大家参考,具体内容如下 这里是Demo 首先声明以下对象: #import "CustomeCameraViewController.h" #import <AVFoundation/AVFoundation.h> #import <AssetsLibrary/AssetsLibrary.h> @interface CustomeCameraViewController ()

  • iOS使用音频处理框架The Amazing Audio Engine实现音频录制播放

    iOS 第三方音频框架The Amazing Audio Engine使用,实现音频录制.播放,可设置配乐. 首先看一下效果图: 下面贴上核心控制器代码: #import "ViewController.h" #import <AVFoundation/AVFoundation.h> #import "HWProgressHUD.h" #import "UIImage+HW.h" #import "AERecorder.h&

  • IOS 陀螺仪开发(CoreMotion框架)实例详解

    iOS陀螺仪 参数意义 self.mManager = [[CMMotionManager alloc]init]; self.mManager.deviceMotionUpdateInterval = 0.5; if (self.mManager.gyroAvailable) { [self.mManager startDeviceMotionUpdatesToQueue:[NSOperationQueue currentQueue] withHandler:^(CMDeviceMotion

  • iOS中使用JSPatch框架使Objective-C与JavaScript代码交互

    JSPatch是GitHub上一个开源的框架,其可以通过Objective-C的run-time机制动态的使用JavaScript调用与替换项目中的Objective-C属性与方法.其框架小巧,代码简洁,并且通过系统的JavaScriptCore框架与Objective-C进行交互,这使其在安全性和审核风险上都有很强的优势.Git源码地址:https://github.com/bang590/JSPatch. 一.从一个官方的小demo看起 通过cocoapods将JSPath集成进一个Xcod

  • iOS开发之AssetsLibrary框架使用详解

    一.引言 AssetsLibrary框架是专门用来操作相册相关资源的一个框架,其是iOS4到iOS9之间常使用的一个框架,在iOS9之后,系统系统了Photos框架代替了AssetsLibrary框架,但是AssetsLibrary框架依然可以使用,并且其结构和设计思路依然值得我们进行分析学习. 二.概述 AssetsLibrary框架会操作系统的相册,因此首先需要进行权限的申请,在使用之前,首先需要在Info.plist文件中添加如下键值: Privacy - Photo Library Us

  • iOS开发使用GDataXML框架解析网络数据

    前言:GDataXML是google基于C语言写的第三方框架,该源码文件就一个类,看其源码,基本使用了C语言的底层的很多lib编译库代码,所以刚导入使用,会报错提示需要设置导入需要的链接库. 另外,该第三方框架并没有纳入Cocoapods,所以通过pod搜索不到这个框架. 1.使用GDataXML框架,将GDataXML框架导入到工程中.下载链接:http://xiazai.jb51.net/201602/yuanma/GDataXML(jb51.net).zip.然后先编译一下,会有错误提示,

  • 详解iOS的Core Animation框架中的CATransform3D图形变换

    一.矩阵坐标 CATransform3D定义了一个变化矩阵,通过对矩阵参数的设置,我们可以改变layer的一些属性,这个属性的改变,可以产生动画的效果. CATransform3D CATransform3DMakeTranslation (CGFloat tx, CGFloat ty, CGFloat tz) tx:X轴偏移位置,往下为正数. ty:Y轴偏移位置,往右为正数. tz:Z轴偏移位置,往外为正数. 例: 如果有2个图层,一个是绿色的,一个是红色的.先加载绿色,后加载红色. tx,t

  • iOS 图片加载框架SDWebImage解读

    目的 在使用SDWebImage加载图片时,尤其是加载gif等大图时,SDWebImage会将图片缓存在内存中,这样是非常吃内存的,这时我们就需要在适当的时候去释放一下SDWebImage的内存缓存,才不至于造成APP闪退. SDWebImage提供了 UIImageView.UIButton .MKAnnotationView 的图片下载分类,只要一行代码就可以实现图片异步下载和缓存功能. 这样开发者就无须花太多精力在图片下载细节上,专心处理业务逻辑. SDWebImage 特点 提供 UII

  • iOS基于AVFoundation 制作用于剪辑视频项目

    目录 项目效果图 功能实现 一.选取视频并播放 二.按帧获取缩略图初始化视频轨道 三.视频指定时间跳转 四.播放器监听 五.导出视频 最近做了一个剪辑视频的小项目,踩了一些小坑,但还是有惊无险的实现了功能. 其实 Apple 官方也给了一个 UIVideoEditController 让我们来做视频的处理,但难以进行扩展或者自定义,所以咱们就用 Apple 给的一个框架 AVFoundation 来开发自定义的视频处理. 而且发现网上并没有相关的并且比较系统的资料,于是写下了本文,希望能对也在做

随机推荐