Real-Time Camera Image Capture on iOS
This article shares the concrete code for capturing camera images in real time on iOS, for your reference. The details are as follows.
I recently received a requirement to fetch the camera's current photo in real time: the camera must stay on for a set period of time, and the current image data must be delivered back continuously through a callback.
This implementation combines AVCaptureDevice, AVCaptureSession, and AVCaptureVideoPreviewLayer with UIView, UIImageView, and UIImage.
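One prerequisite the code below assumes: camera access must already be granted, which means adding an NSCameraUsageDescription entry to Info.plist and requesting authorization before starting the session. A minimal sketch of the request (the helper name requestCameraAccessThen: is my own, not part of the original code):

- (void)requestCameraAccessThen:(void (^)(void))completion
{
    // NSCameraUsageDescription must be present in Info.plist,
    // otherwise the app is terminated on first camera access.
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                             completionHandler:^(BOOL granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (granted && completion) {
                completion(); // safe to create the camera view now
            }
        });
    }];
}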
The concrete implementation is as follows:
#import <UIKit/UIKit.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

@interface YHCameraView : UIView <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, weak) UIImageView *cameraImageView;
@property (strong, nonatomic) AVCaptureDevice *device;
@property (strong, nonatomic) AVCaptureSession *captureSession;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;
@property (strong, nonatomic) UIImage *cameraImage;

@end

NS_ASSUME_NONNULL_END
#import "YHCameraView.h" @implementation YHCameraView - (instancetype)initWithFrame:(CGRect)frame { if (self = [super initWithFrame:frame]) { self.backgroundColor = [UIColor lightGrayColor]; [self createUI]; } return self; } /* // Only override drawRect: if you perform custom drawing. // An empty implementation adversely affects performance during animation. - (void)drawRect:(CGRect)rect { // Drawing code } */ - (void)createUI { NSArray* devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; for(AVCaptureDevice *device in devices) { if([device position] == AVCaptureDevicePositionFront) // 前置摄像头 self.device = device; } AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil]; AVCaptureVideoDataOutput* output = [[AVCaptureVideoDataOutput alloc] init]; output.alwaysDiscardsLateVideoFrames = YES; dispatch_queue_t queue; queue = dispatch_queue_create("cameraQueue", NULL); [output setSampleBufferDelegate:self queue:queue]; NSString* key = (NSString *) kCVPixelBufferPixelFormatTypeKey; NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; [output setVideoSettings:videoSettings]; self.captureSession = [[AVCaptureSession alloc] init]; [self.captureSession addInput:input]; [self.captureSession addOutput:output]; [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto]; self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession]; self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // CHECK FOR YOUR APP NSInteger screenWidth = self.frame.size.width; NSInteger screenHeitht = self.frame.size.height; self.previewLayer.frame = self.bounds; self.previewLayer.orientation = AVCaptureVideoOrientationPortrait; // CHECK FOR YOUR APP // [self.layer insertSublayer:self.previewLayer atIndex:0]; // Comment-out to hide preview layer [self.captureSession startRunning]; } - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(imageBuffer, 0); uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); size_t width = CVPixelBufferGetWidth(imageBuffer); size_t height = CVPixelBufferGetHeight(imageBuffer); CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); CGImageRef newImage = CGBitmapContextCreateImage(newContext); CGContextRelease(newContext); CGColorSpaceRelease(colorSpace); self.cameraImage = [UIImage imageWithCGImage:newImage scale:1.0f orientation:UIImageOrientationLeftMirrored]; // UIImageOrientationDownMirrored self.cameraImageView.image = [UIImage imageWithCGImage:newImage scale:1.0f orientation:UIImageOrientationLeftMirrored]; CGImageRelease(newImage); CVPixelBufferUnlockBaseAddress(imageBuffer, 0); } @end
Once the view is instantiated, simply read the cameraView's cameraImage property whenever the current frame is needed:
#pragma mark - Snapshot capture

/// Snapshot capture
- (YHCameraView *)cameraView
{
    if (!_cameraView) {
        YHCameraView *view = [[YHCameraView alloc] init];
        view.frame = CGRectMake(1, 1, 1, 1); // keep the view tiny so it stays effectively invisible
        view.cameraImageView.image = view.cameraImage;
        _cameraView = view;
    }
    return _cameraView;
}

NSString *strImg = [YHCameraManager imageBase64EncodedWithImage:self.cameraView.cameraImage
                                                   AndImageType:@"JPEG"]; // fetch the current frame as Base64
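The requirement calls for repeated snapshots within a set time window, and nothing above drives that loop, so here is one possible way to wire it up with an NSTimer. This is a sketch under my own assumptions: captureTimer is an assumed NSTimer property on the hosting view controller, and the one-second interval and ten-second window are placeholders.

// Sample the latest frame once per second for ten seconds, then shut the camera down.
- (void)startSnapshotLoop
{
    // self.captureTimer is an assumed property, not part of the original code
    self.captureTimer = [NSTimer scheduledTimerWithTimeInterval:1.0
                                                         target:self
                                                       selector:@selector(takeSnapshot)
                                                       userInfo:nil
                                                        repeats:YES];
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(10 * NSEC_PER_SEC)),
                   dispatch_get_main_queue(), ^{
        [self.captureTimer invalidate];
        self.captureTimer = nil;
        [self.cameraView.captureSession stopRunning]; // release the camera when done
    });
}

- (void)takeSnapshot
{
    UIImage *frame = self.cameraView.cameraImage;
    if (frame) {
        NSString *strImg = [YHCameraManager imageBase64EncodedWithImage:frame AndImageType:@"JPEG"];
        // hand strImg to whatever consumes the callback
    }
}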
/**
 Convert an image to Base64

 @param img  the source image
 @param type image type (PNG or JPEG)
 @return the encoded string, or nil on failure
 */
+ (NSString *)imageBase64EncodedWithImage:(UIImage *)img AndImageType:(NSString *)type
{
    NSString *callBack = nil;
    if ([img isKindOfClass:[UIImage class]]) {
        NSData *data = nil;
        if ([type isEqualToString:@"PNG"]) {
            data = UIImagePNGRepresentation(img);
        } else {
            data = UIImageJPEGRepresentation(img, 1.0f);
        }
        NSString *encodedImgStr = [data base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength];
        NSLog(@"YHCameraManager\nencodedImgStr: %@", encodedImgStr);
        return encodedImgStr;
    } else {
        return callBack;
    }
}
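For completeness, the string produced above can be turned back into a UIImage on the receiving end. Note that NSDataBase64Encoding64CharacterLineLength inserts line breaks into the output, so the decoder must be told to skip them; a minimal round-trip sketch:

// Decode a string produced by imageBase64EncodedWithImage:AndImageType:.
// The inserted line breaks are skipped via NSDataBase64DecodingIgnoreUnknownCharacters.
NSData *decoded = [[NSData alloc] initWithBase64EncodedString:strImg
                                                      options:NSDataBase64DecodingIgnoreUnknownCharacters];
UIImage *restored = decoded ? [UIImage imageWithData:decoded] : nil;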
This concludes the article. I hope it is helpful to everyone's learning, and thank you for your continued support.