Recording Video on iOS with AVCaptureSession

This post shares a concrete example of implementing video recording with AVCaptureSession, for your reference. The details are as follows.
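One practical note before the code: on iOS 10 and later the app must declare NSCameraUsageDescription and NSMicrophoneUsageDescription in Info.plist (plus NSPhotoLibraryUsageDescription if the recording is saved to the photo library), otherwise the system terminates the app when it tries to access the camera or microphone. The sample below does not request authorization explicitly; a minimal sketch using the standard AVCaptureDevice API could look like this:

[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
  if (!granted) NSLog(@"Camera access was denied");
}];
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL granted) {
  if (!granted) NSLog(@"Microphone access was denied");
}];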

#import "RecordingVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h> 

@interface RecordingVideoViewController ()<AVCaptureFileOutputRecordingDelegate> 

//Session: coordinates the data flow between the inputs and outputs
@property (strong,nonatomic) AVCaptureSession  *captureSession;
//Device inputs: provide data from an AVCaptureDevice
@property (strong,nonatomic) AVCaptureDeviceInput  *videoCaptureDeviceInput;
@property (strong,nonatomic) AVCaptureDeviceInput  *audioCaptureDeviceInput;
//Movie file output
@property (strong,nonatomic) AVCaptureMovieFileOutput  *captureMovieFileOutput;
//Camera preview layer
@property (strong,nonatomic) AVCaptureVideoPreviewLayer  *captureVideoPreviewLayer;

//Container for the custom UI controls
@property (strong,nonatomic) UIView  *viewContainer;
//Focus cursor
@property (strong,nonatomic) UIImageView  *focusCursor;
//Recording duration label
@property (strong,nonatomic) UILabel  *timeLabel;
//Switch between the front and back cameras
@property (strong,nonatomic) UIButton  *switchCameraBtn;
//Change the zoom factor
@property (strong,nonatomic) UIButton  *scaleBtn;
//Timer
@property (strong,nonatomic) NSTimer  *timer;

@end 

@implementation RecordingVideoViewController {
 @private
  NSInteger _num;
  CGFloat _kCameraScale;
} 

- (UIView *)viewContainer {
  if (!_viewContainer) {
    _viewContainer = [[UIView alloc] initWithFrame:[UIScreen mainScreen].bounds]; 

    UIButton *takeButton = [UIButton buttonWithType:UIButtonTypeCustom];
    takeButton.backgroundColor = [UIColor redColor];
    [takeButton setTitle:@"start" forState:UIControlStateNormal];
    [takeButton addTarget:self action:@selector(takeButtonClick:) forControlEvents:UIControlEventTouchUpInside]; 

    _timeLabel = [[UILabel alloc] init];
    _timeLabel.textColor = [UIColor redColor];
    _timeLabel.textAlignment = NSTextAlignmentCenter;
    _timeLabel.font = [UIFont boldSystemFontOfSize:20];
    _timeLabel.text = @"00:00"; 

    _switchCameraBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    [_switchCameraBtn setTitle:@"switch" forState:UIControlStateNormal];
    _switchCameraBtn.backgroundColor = [UIColor redColor];
    [_switchCameraBtn addTarget:self action:@selector(switchCameraBtnClick) forControlEvents:UIControlEventTouchUpInside]; 

    _scaleBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    [_scaleBtn setTitle:@"1X" forState:UIControlStateNormal];
    _scaleBtn.backgroundColor = [UIColor redColor];
    [_scaleBtn addTarget:self action:@selector(scaleBtnClick:) forControlEvents:UIControlEventTouchUpInside]; 

    [_viewContainer addSubview:takeButton];
    [_viewContainer addSubview:_timeLabel];
    [_viewContainer addSubview:_scaleBtn];
    [_viewContainer addSubview:_switchCameraBtn];
    [takeButton mas_makeConstraints:^(MASConstraintMaker *make) {
      make.size.mas_equalTo(CGSizeMake(60, 40));
      make.centerX.mas_equalTo(_viewContainer);
      make.bottom.mas_equalTo(_viewContainer).offset(-64);
    }];
    [_timeLabel mas_makeConstraints:^(MASConstraintMaker *make) {
      make.centerX.mas_equalTo(_viewContainer);
      make.height.mas_equalTo(@30);
      make.top.mas_equalTo(_viewContainer);
    }];
    [_scaleBtn mas_makeConstraints:^(MASConstraintMaker *make) {
      make.size.mas_equalTo(CGSizeMake(60, 40));
      make.left.mas_equalTo(_viewContainer).offset(10);
      make.top.mas_equalTo(_viewContainer);
    }];
    [_switchCameraBtn mas_makeConstraints:^(MASConstraintMaker *make) {
      make.size.mas_equalTo(CGSizeMake(60, 40));
      make.top.mas_equalTo(_viewContainer);
      make.right.mas_equalTo(_viewContainer).offset(-10);
    }]; 

    _focusCursor = [[UIImageView alloc] init];
    // kBorder is a project-specific macro (not shown here) that applies a layer border with the given width and color
    kBorder(_focusCursor, 1, [UIColor yellowColor]);
    _focusCursor.alpha = 0;
    [_viewContainer addSubview:self.focusCursor];
    [_focusCursor mas_makeConstraints:^(MASConstraintMaker *make) {
      make.size.mas_equalTo(CGSizeMake(40, 40));
      make.center.mas_equalTo(_viewContainer);
    }]; 

  }
  return _viewContainer;
} 

- (void)viewDidLoad {
  [super viewDidLoad]; 

  self.title = @"Video Recording";
  _kCameraScale = 1.0f;
  //Create the capture session
  _captureSession = [[AVCaptureSession alloc] init];
  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
  } 

  NSError *error = nil; 

  //Get the video input device
  AVCaptureDevice *videoCaptureDevice = [self cameraDeviceWithPosition:(AVCaptureDevicePositionBack)];
  if (!videoCaptureDevice) {
    NSLog(@"获取后置摄像头失败!");
    return;
  }
  _videoCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoCaptureDevice error:&error];
  if (error) {
    NSLog(@"取得视频设备输入对象时出错");
    return;
  } 

  //Get the audio input device
  AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
  _audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
  if (error) {
    NSLog(@"取得音频设备输入对象时出错");
    return;
  } 

  //Create the movie file output
  _captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

  //Add the device inputs to the session
  if ([_captureSession canAddInput:_videoCaptureDeviceInput]) {
    [_captureSession addInput:_videoCaptureDeviceInput];
    [_captureSession addInput:_audioCaptureDeviceInput];
  }

  //Add the device output to the session
  if ([_captureSession canAddOutput:_captureMovieFileOutput]) {
    [_captureSession addOutput:_captureMovieFileOutput];

    //Video stabilization: the connection exists only after the output has been added
    //to the session, and it must be the video connection (not the audio one)
    AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoStabilizationSupported]) {
      captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
  }

  //Create the video preview layer
  _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
  self.viewContainer.layer.masksToBounds = YES;
  _captureVideoPreviewLayer.frame = self.viewContainer.bounds;
  _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  [self.view.layer addSublayer:_captureVideoPreviewLayer]; 

  //Add the custom controls on top of the preview
  [self.view addSubview:self.viewContainer]; 

  //Add the tap-to-focus gesture
  UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapScreen:)];
  [self.viewContainer addGestureRecognizer:tapGesture]; 

} 

-(void)viewDidAppear:(BOOL)animated{
  [super viewDidAppear:animated];
  [self.captureSession startRunning];
} 
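// Note (not part of the original sample): -startRunning is a blocking call, so Apple's
// documentation recommends starting and stopping the session off the main thread. A minimal
// sketch, assuming you add a helper like this and call it from viewDidAppear: instead:
- (void)startSessionOnBackgroundQueue {
  dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    [self.captureSession startRunning];
  });
}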

-(void)viewDidDisappear:(BOOL)animated{
  [super viewDidDisappear:animated];
  [self.captureSession stopRunning];
  [self.timer invalidate];
  self.timer = nil;
} 

- (void)viewWillDisappear:(BOOL)animated {
  [super viewWillDisappear:animated];
  [self.captureVideoPreviewLayer setAffineTransform:CGAffineTransformMakeScale(1, 1)];
} 

- (void)didReceiveMemoryWarning {
  [super didReceiveMemoryWarning];
} 

//Start / stop recording
- (void)takeButtonClick:(UIButton *)sender {
  if ([self.captureMovieFileOutput isRecording]) {
    [self.captureMovieFileOutput stopRecording]; 

    [self.navigationController popViewControllerAnimated:YES]; 

  } else {
    AVCaptureConnection *captureConnection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    captureConnection.videoOrientation = [self.captureVideoPreviewLayer connection].videoOrientation; 

    NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Movie.mov"];
    NSLog(@"%@",filePath);
    [self.captureMovieFileOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:filePath] recordingDelegate:self]; 

    self.switchCameraBtn.hidden = YES; 

    sender.backgroundColor = [UIColor greenColor];
    [sender setTitle:@"stop" forState:UIControlStateNormal]; 

    self.timer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(timeAction) userInfo:nil repeats:YES];
    [self.timer setFireDate:[NSDate distantPast]];
  }
} 
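// Optional (not in the original sample): AVCaptureFileOutput can also enforce recording limits
// by itself; setting these before -startRecordingToOutputFileURL: makes recording stop
// automatically when a limit is reached. A sketch with hypothetical values:
- (void)applyRecordingLimits {
  self.captureMovieFileOutput.maxRecordedDuration = CMTimeMake(60, 1);   // cap the clip at 60 seconds
  self.captureMovieFileOutput.minFreeDiskSpaceLimit = 50 * 1024 * 1024;  // stop before less than 50 MB remains free
}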

//Switch between the front and back cameras
- (void)switchCameraBtnClick {
  AVCaptureDevicePosition currentPosition = self.videoCaptureDeviceInput.device.position;
  AVCaptureDevicePosition toPosition;
  if (currentPosition == AVCaptureDevicePositionUnspecified ||
    currentPosition == AVCaptureDevicePositionFront) {
    toPosition = AVCaptureDevicePositionBack;
  } else {
    toPosition = AVCaptureDevicePositionFront;
  } 

  AVCaptureDevice *toCaptureDevice = [self cameraDeviceWithPosition:toPosition];
  if (!toCaptureDevice) {
    NSLog(@"Failed to get the camera to switch to");
    return;
  }

  NSError *error = nil;
  AVCaptureDeviceInput *toVideoDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toCaptureDevice error:&error];
  if (error) {
    NSLog(@"Failed to create the device input to switch to");
    return;
  }

  //Begin the session configuration change
  [self.captureSession beginConfiguration]; 

  [self.captureSession removeInput:self.videoCaptureDeviceInput];
  if ([self.captureSession canAddInput:toVideoDeviceInput]) {
    [self.captureSession addInput:toVideoDeviceInput]; 

    self.videoCaptureDeviceInput = toVideoDeviceInput;
  }
  //Commit the session configuration
  [self.captureSession commitConfiguration];
} 

//Tap gesture: tap to focus
- (void)tapScreen:(UITapGestureRecognizer *)tap {
  CGPoint point = [tap locationInView:self.viewContainer]; 

  //Convert the view coordinate to a camera (device) coordinate
  CGPoint cameraPoint = [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point]; 

  //Focus cursor animation
  self.focusCursor.center = point;
  self.focusCursor.transform = CGAffineTransformMakeScale(1.5, 1.5);
  self.focusCursor.alpha = 1.0f;
  [UIView animateWithDuration:1 animations:^{
    self.focusCursor.transform = CGAffineTransformIdentity;
  } completion:^(BOOL finished) {
    self.focusCursor.alpha = 0.0f; 

  }]; 

  //Set the focus point of interest
  [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint]; 

} 

/** Set the focus point */
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{ 

  AVCaptureDevice *captureDevice= [self.videoCaptureDeviceInput device];
  NSError *error = nil;
  //Device properties must be changed between lockForConfiguration: and unlockForConfiguration
  if ([captureDevice lockForConfiguration:&error]) { 

    if ([captureDevice isFocusModeSupported:focusMode]) {
      [captureDevice setFocusMode:focusMode];
    }
    if ([captureDevice isFocusPointOfInterestSupported]) {
      [captureDevice setFocusPointOfInterest:point];
    }
    //    //Exposure
    //    if ([captureDevice isExposureModeSupported:exposureMode]) {
    //      [captureDevice setExposureMode:exposureMode];
    //    }
    //    if ([captureDevice isExposurePointOfInterestSupported]) {
    //      [captureDevice setExposurePointOfInterest:point];
    //    }
    //    //Flash mode
    //    if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
    //      [captureDevice setFlashMode:AVCaptureFlashModeAuto];
    //    } 

    //Unlock the device
    [captureDevice unlockForConfiguration]; 

  }else{
    NSLog(@"设置设备属性过程发生错误,错误信息:%@",error.localizedDescription);
  }
} 

//Adjust the zoom factor
-(void)scaleBtnClick:(UIButton *)sender
{
  _kCameraScale += 0.5;
  if(_kCameraScale > 3.0) {
    _kCameraScale = 1.0;
  }
  //Apply the new zoom factor
  AVCaptureDevice *videoDevice = self.videoCaptureDeviceInput.device;
  NSError *error = nil;
  if ([videoDevice lockForConfiguration:&error]) { 

    [videoDevice setVideoZoomFactor:_kCameraScale]; 

    [videoDevice unlockForConfiguration]; 

    [sender setTitle:[NSString stringWithFormat:@"%.1fX",_kCameraScale] forState:UIControlStateNormal];

    [CATransaction begin];
    [CATransaction setAnimationDuration:0.25];
    [self.captureVideoPreviewLayer setAffineTransform:CGAffineTransformMakeScale(_kCameraScale, _kCameraScale)];
    [CATransaction commit]; 

  } else {
    NSLog(@"修改设备属性失败!")
  }
} 
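// Note (not in the original sample): -setVideoZoomFactor: throws an exception if the value is
// outside [1.0, activeFormat.videoMaxZoomFactor]. A minimal clamping helper, as a sketch:
- (CGFloat)clampedZoomFactor:(CGFloat)factor forDevice:(AVCaptureDevice *)device {
  CGFloat maxFactor = device.activeFormat.videoMaxZoomFactor;
  return MAX(1.0, MIN(factor, maxFactor));
}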

#pragma mark -------- AVCaptureFileOutputRecordingDelegate ----------
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
  NSLog(@"开始录制");
} 

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
  NSLog(@"录制结束");
  ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
  [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
    if (error) {
      NSLog(@"保存视频到相簿过程中发生错误,错误信息:%@",error.localizedDescription);
    }
  }];
} 
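// Note (not in the original sample): ALAssetsLibrary has been deprecated since iOS 9. A rough
// equivalent using the Photos framework (requires #import <Photos/Photos.h> and photo library
// authorization) could look like this sketch:
- (void)saveVideoToPhotoLibrary:(NSURL *)outputFileURL {
  [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputFileURL];
  } completionHandler:^(BOOL success, NSError *error) {
    if (!success) {
      NSLog(@"Error while saving the video to the photo library: %@", error.localizedDescription);
    }
  }];
}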

//Recording timer
- (void)timeAction {
  self.timeLabel.text = [NSString stringWithFormat:@"%02ld:%02ld",(long)(_num/60),(long)(_num%60)];
  _num ++;
} 

/** Get the camera at the given position */
- (AVCaptureDevice *)cameraDeviceWithPosition:(AVCaptureDevicePosition )position{
  NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *camera in cameras) {
    if ([camera position] == position) {
      return camera;
    }
  }
  return nil;
} 

@end
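A note on the device lookup above: +[AVCaptureDevice devicesWithMediaType:] has been deprecated since iOS 10. Assuming an iOS 10+ deployment target, the same helper could be built on AVCaptureDeviceDiscoverySession instead, roughly like this sketch (position is the same parameter used above):

AVCaptureDeviceDiscoverySession *discoverySession =
    [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                           mediaType:AVMediaTypeVideo
                                                            position:position];
AVCaptureDevice *camera = discoverySession.devices.firstObject;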

Reference code:

#import "VideoTestViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h> 

typedef void(^PropertyChangeBlock)(AVCaptureDevice *captureDevice); 

@interface VideoTestViewController ()<AVCaptureFileOutputRecordingDelegate>//movie file output delegate

@property (strong,nonatomic) AVCaptureSession *captureSession;//coordinates the data flow between inputs and outputs
@property (strong,nonatomic) AVCaptureDeviceInput *captureDeviceInput;//provides input data from an AVCaptureDevice
@property (strong,nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;//movie file output
@property (strong,nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;//camera preview layer

@property (assign,nonatomic) BOOL enableRotation;//whether rotation is allowed (rotation must be disabled while recording)
@property (assign,nonatomic) CGRect lastBounds;//bounds before rotation
@property (assign,nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;//background task identifier
@property (strong,nonatomic) UIView *viewContainer;
@property (strong,nonatomic) UIButton *takeButton;//record button
@property (strong,nonatomic) UIImageView *focusCursor; //focus cursor

@end 

@implementation VideoTestViewController 

#pragma mark - View controller lifecycle
- (void)viewDidLoad {
  [super viewDidLoad];
} 

-(void)viewWillAppear:(BOOL)animated{
  [super viewWillAppear:animated]; 

  //Create the capture session
  _captureSession=[[AVCaptureSession alloc]init];
  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {//set the resolution
    _captureSession.sessionPreset=AVCaptureSessionPreset1280x720;
  }
  //Get the input device (back camera)
  AVCaptureDevice *captureDevice=[self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
  if (!captureDevice) {
    NSLog(@"Problem while getting the back camera.");
    return;
  }
  //Add an audio input device
  AVCaptureDevice *audioCaptureDevice=[[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];

  NSError *error=nil;
  //Create the device input object, which provides the input data
  _captureDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:captureDevice error:&error];
  if (error) {
    NSLog(@"取得设备输入对象时出错,错误原因:%@",error.localizedDescription);
    return;
  }
  AVCaptureDeviceInput *audioCaptureDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:audioCaptureDevice error:&error];
  if (error) {
    NSLog(@"取得设备输入对象时出错,错误原因:%@",error.localizedDescription);
    return;
  }
  //Create the movie file output, which provides the recorded data
  _captureMovieFileOutput=[[AVCaptureMovieFileOutput alloc]init];

  //Add the device inputs to the session
  if ([_captureSession canAddInput:_captureDeviceInput]) {
    [_captureSession addInput:_captureDeviceInput];
    [_captureSession addInput:audioCaptureDeviceInput];
  }

  //Add the device output to the session
  if ([_captureSession canAddOutput:_captureMovieFileOutput]) {
    [_captureSession addOutput:_captureMovieFileOutput];
    //The video connection only exists after the output has been added to the session
    AVCaptureConnection *captureConnection=[_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoStabilizationSupported]) {
      captureConnection.preferredVideoStabilizationMode=AVCaptureVideoStabilizationModeAuto;
    }
  }

  //Create the preview layer to show the live camera feed
  _captureVideoPreviewLayer=[[AVCaptureVideoPreviewLayer alloc]initWithSession:self.captureSession]; 

  CALayer *layer=self.viewContainer.layer;
  layer.masksToBounds=YES; 

  _captureVideoPreviewLayer.frame=layer.bounds;
  _captureVideoPreviewLayer.videoGravity=AVLayerVideoGravityResizeAspectFill;//fill mode
  //Add the preview layer to the view hierarchy
  //[layer addSublayer:_captureVideoPreviewLayer];
  [layer insertSublayer:_captureVideoPreviewLayer below:self.focusCursor.layer]; 

  _enableRotation=YES;
  [self addNotificationToCaptureDevice:captureDevice];
  [self addGenstureRecognizer];
} 

-(void)viewDidAppear:(BOOL)animated{
  [super viewDidAppear:animated];
  [self.captureSession startRunning];
} 

-(void)viewDidDisappear:(BOOL)animated{
  [super viewDidDisappear:animated];
  [self.captureSession stopRunning];
} 

- (void)didReceiveMemoryWarning {
  [super didReceiveMemoryWarning];
} 

-(BOOL)shouldAutorotate{
  return self.enableRotation;
} 

////Adjust the preview layer orientation when the screen rotates
//-(void)willTransitionToTraitCollection:(UITraitCollection *)newCollection withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator{
//  [super willTransitionToTraitCollection:newCollection withTransitionCoordinator:coordinator];
////  NSLog(@"%i,%i",newCollection.verticalSizeClass,newCollection.horizontalSizeClass);
//  UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
//  NSLog(@"%i",orientation);
//  AVCaptureConnection *captureConnection=[self.captureVideoPreviewLayer connection];
//  captureConnection.videoOrientation=orientation;
//
//}
//Adjust the preview layer orientation when the screen rotates
-(void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration{
  AVCaptureConnection *captureConnection=[self.captureVideoPreviewLayer connection];
  captureConnection.videoOrientation=(AVCaptureVideoOrientation)toInterfaceOrientation;
}
//Resize the preview layer after rotation
-(void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation{
  _captureVideoPreviewLayer.frame=self.viewContainer.bounds;
} 

-(void)dealloc{
  [self removeNotification];
}
#pragma mark - UI actions
#pragma mark Video recording
- (void)takeButtonClick:(UIButton *)sender {
  //Get the video connection from the movie file output
  AVCaptureConnection *captureConnection=[self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
  if (![self.captureMovieFileOutput isRecording]) {
    self.enableRotation=NO;
    //If multitasking is supported, start a background task so the file can still be
    //saved when the app is sent to the background
    if ([[UIDevice currentDevice] isMultitaskingSupported]) {
      self.backgroundTaskIdentifier=[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
    }
    //Keep the recording orientation in sync with the preview layer
    captureConnection.videoOrientation=[self.captureVideoPreviewLayer connection].videoOrientation;
    NSString *outputFilePath=[NSTemporaryDirectory() stringByAppendingPathComponent:@"myMovie.mov"];
    NSLog(@"save path is :%@",outputFilePath);
    NSURL *fileUrl=[NSURL fileURLWithPath:outputFilePath];
    [self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
  }
  else{
    [self.captureMovieFileOutput stopRecording];//stop recording
  }
}
#pragma mark Switching between the front and back cameras
- (void)toggleButtonClick:(UIButton *)sender {
  AVCaptureDevice *currentDevice=[self.captureDeviceInput device];
  AVCaptureDevicePosition currentPosition=[currentDevice position];
  [self removeNotificationFromCaptureDevice:currentDevice];
  AVCaptureDevice *toChangeDevice;
  AVCaptureDevicePosition toChangePosition=AVCaptureDevicePositionFront;
  if (currentPosition==AVCaptureDevicePositionUnspecified||currentPosition==AVCaptureDevicePositionFront) {
    toChangePosition=AVCaptureDevicePositionBack;
  }
  toChangeDevice=[self getCameraDeviceWithPosition:toChangePosition];
  [self addNotificationToCaptureDevice:toChangeDevice];
  //Create the device input for the camera we are switching to
  AVCaptureDeviceInput *toChangeDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:toChangeDevice error:nil]; 

  //Always call beginConfiguration before changing the session configuration, and commitConfiguration when done
  [self.captureSession beginConfiguration];
  //Remove the old input
  [self.captureSession removeInput:self.captureDeviceInput];
  //Add the new input
  if ([self.captureSession canAddInput:toChangeDeviceInput]) {
    [self.captureSession addInput:toChangeDeviceInput];
    self.captureDeviceInput=toChangeDeviceInput;
  }
  //Commit the session configuration
  [self.captureSession commitConfiguration]; 

} 

#pragma mark - File output recording delegate
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
  NSLog(@"开始录制...");
}
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
  NSLog(@"视频录制完成.");
  //After recording finishes, save the video to the photo album inside the background task
  self.enableRotation=YES;
  UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier=self.backgroundTaskIdentifier;
  self.backgroundTaskIdentifier=UIBackgroundTaskInvalid;
  ALAssetsLibrary *assetsLibrary=[[ALAssetsLibrary alloc]init];
  [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
    if (error) {
      NSLog(@"保存视频到相簿过程中发生错误,错误信息:%@",error.localizedDescription);
    }
    if (lastBackgroundTaskIdentifier!=UIBackgroundTaskInvalid) {
      [[UIApplication sharedApplication] endBackgroundTask:lastBackgroundTaskIdentifier];
    }
    NSLog(@"成功保存视频到相簿.");
  }]; 

} 

#pragma mark - Notifications
/**
 * Add notifications for the input device
 */
-(void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice{
  //Note: subject area change monitoring must be enabled on the device before observing this notification
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    captureDevice.subjectAreaChangeMonitoringEnabled=YES;
  }];
  NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
  //The subject area changed
  [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}
-(void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice{
  NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
  [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}
/**
 * Remove all notifications
 */
-(void)removeNotification{
  NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
  [notificationCenter removeObserver:self];
} 

-(void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession{
  NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
  //Session runtime error
  [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
} 

/**
 * Device connected
 *
 * @param notification notification object
 */
-(void)deviceConnected:(NSNotification *)notification{
  NSLog(@"设备已连接...");
}
/**
 * Device disconnected
 *
 * @param notification notification object
 */
-(void)deviceDisconnected:(NSNotification *)notification{
  NSLog(@"设备已断开.");
}
/**
 * Subject area changed
 *
 * @param notification notification object
 */
-(void)areaChange:(NSNotification *)notification{
  NSLog(@"捕获区域改变...");
} 

/**
 * Session runtime error
 *
 * @param notification notification object
 */
-(void)sessionRuntimeError:(NSNotification *)notification{
  NSLog(@"会话发生错误.");
} 

#pragma mark - Private methods

/**
 * Get the camera at the given position
 *
 * @param position camera position
 *
 * @return the matching capture device
 */
-(AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{
  NSArray *cameras= [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *camera in cameras) {
    if ([camera position]==position) {
      return camera;
    }
  }
  return nil;
} 

/**
 * Common helper for changing device properties
 *
 * @param propertyChange block that performs the property change
 */
-(void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{
  AVCaptureDevice *captureDevice= [self.captureDeviceInput device];
  NSError *error;
  //Note: lockForConfiguration: must be called before changing device properties, and unlockForConfiguration afterwards
  if ([captureDevice lockForConfiguration:&error]) {
    propertyChange(captureDevice);
    [captureDevice unlockForConfiguration];
  }else{
    NSLog(@"设置设备属性过程发生错误,错误信息:%@",error.localizedDescription);
  }
} 

/**
 * Set the flash mode
 *
 * @param flashMode flash mode
 */
-(void)setFlashMode:(AVCaptureFlashMode )flashMode{
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    if ([captureDevice isFlashModeSupported:flashMode]) {
      [captureDevice setFlashMode:flashMode];
    }
  }];
}
/**
 * Set the focus mode
 *
 * @param focusMode focus mode
 */
-(void)setFocusMode:(AVCaptureFocusMode )focusMode{
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    if ([captureDevice isFocusModeSupported:focusMode]) {
      [captureDevice setFocusMode:focusMode];
    }
  }];
}
/**
 * Set the exposure mode
 *
 * @param exposureMode exposure mode
 */
-(void)setExposureMode:(AVCaptureExposureMode)exposureMode{
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    if ([captureDevice isExposureModeSupported:exposureMode]) {
      [captureDevice setExposureMode:exposureMode];
    }
  }];
}
/**
 * Set the focus point
 *
 * @param point focus point
 */
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    if ([captureDevice isFocusModeSupported:focusMode]) {
      [captureDevice setFocusMode:focusMode];
    }
    if ([captureDevice isFocusPointOfInterestSupported]) {
      [captureDevice setFocusPointOfInterest:point];
    }
    if ([captureDevice isExposureModeSupported:exposureMode]) {
      [captureDevice setExposureMode:exposureMode];
    }
    if ([captureDevice isExposurePointOfInterestSupported]) {
      [captureDevice setExposurePointOfInterest:point];
    }
  }];
} 

/**
 * Add a tap gesture; tapping focuses at that point
 */
-(void)addGenstureRecognizer{
  UITapGestureRecognizer *tapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapScreen:)];
  [self.viewContainer addGestureRecognizer:tapGesture];
}
-(void)tapScreen:(UITapGestureRecognizer *)tapGesture{
  CGPoint point= [tapGesture locationInView:self.viewContainer];
  //Convert the UI coordinate to a camera (device) coordinate
  CGPoint cameraPoint= [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point];
  [self setFocusCursorWithPoint:point];
  [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
} 

/**
 * Position the focus cursor
 *
 * @param point cursor position
 */
-(void)setFocusCursorWithPoint:(CGPoint)point{
  self.focusCursor.center=point;
  self.focusCursor.transform=CGAffineTransformMakeScale(1.5, 1.5);
  self.focusCursor.alpha=1.0;
  [UIView animateWithDuration:1.0 animations:^{
    self.focusCursor.transform=CGAffineTransformIdentity;
  } completion:^(BOOL finished) {
    self.focusCursor.alpha=0;
  }];
}
@end 

That is all for this article. I hope it helps with your own projects, and thank you for your support.
