自己封装了一个视频采集类。

1.支持动态修改前后置摄像头。

2.支持分辨率、帧率修改,当设置不支持的分辨率时,自动匹配最相近的分辨率。

3.视频预览功能

VideoCapture.h

//
//  VideoCapture.h
//  VideoCapture
//
//  Created by whw on 16-8-27.
//  Copyright (c) 2016 All rights reserved.
//

// FIX: this import had been fused into the copyright comment above,
// leaving AVCaptureVideoPreviewLayer undeclared for header clients.
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>   // UIImage in -takePicture:Complete:
#import "AVDefine.h"

// C++ forward declaration: this header must only be imported from
// Objective-C++ (.mm) translation units.
class NativeVideoFrame;
struct VideoCapability;

/// Sink for captured frames. Extends the project-wide Reporter protocol
/// (declared in AVDefine.h) so capture warnings/errors can be forwarded.
@protocol CaptureDelegate <Reporter>
/// Delivered for every captured frame, on the internal capture queue.
/// The frame is only valid for the duration of the call.
- (void)rawFrame:(NativeVideoFrame*)frame Capability:(struct VideoCapability*)capability;
@end

/// Camera capture wrapper around AVCaptureSession:
/// front/back camera switching, resolution & frame-rate configuration with
/// closest-match fallback, and a preview layer for on-screen display.
@interface VideoCapture : NSObject
- (id)initWithDelegate:(id<CaptureDelegate>)delegate;
/// Selects the camera by position; YES selects the front camera.
- (BOOL)setCaptureDeviceByPosition:(BOOL)front;
/// Applies width/height/fps; restarts the session when it is running.
- (BOOL)setCaptureCapability:(struct VideoCapability*)capability;
- (BOOL)startCapture;
- (BOOL)stopCapture;
/// Layer the caller can add to its view hierarchy for live preview.
- (AVCaptureVideoPreviewLayer *)previewLayer;
/// Still-image capture. NOTE: currently disabled in the implementation.
- (void)takePicture:(int)size Complete:(void(^)(UIImage*, NSError*))complete;
@end

VideoCapture.mm

//
//  VideoCapture.m
//  VideoCapture
//
//  Created by whw on 16-8-27.
//  Copyright (c) 2016年 All rights reserved.
//
#import <VideoCapture.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <Utils.h>
#include "VideoFrame.h"
#import "VideoDefines.h"

// Serial queue on which all capture work and sample-buffer callbacks run.
static dispatch_queue_t captureQueue = nil;
// Queue-specific key used to detect "already running on the capture queue".
static const char *captureQueueSpecific = "com.video.capturequeue";

@interface VideoCapture() <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    __weak id<CaptureDelegate> _owner;      // frame/error sink; weak to avoid a retain cycle
    VideoCapability _capability;            // requested width/height/fps
    AVCaptureSession* _captureSession;
    Rotation _frameRotation;
    BOOL _orientationHasChanged;            // set once a real orientation change was observed
    AVCaptureConnection* _connection;
    BOOL _captureChanging;                  // Guarded by _captureChangingCondition.
    NSCondition* _captureChangingCondition;
    NativeVideoFrame _videoFrame;           // reused C++ frame wrapper (file is ObjC++)
}
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *prevLayer;
@property (nonatomic, strong) NSDictionary *resolutionDic;  // "W*H" -> AVCaptureSessionPreset
@end
@implementation VideoCapture
// Lazily creates the shared capture queue. FIX: use dispatch_once so the
// initialization is thread-safe — the original bare nil-check raced when
// first called from two threads concurrently.
- (dispatch_queue_t)captureQueue
{
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        captureQueue = dispatch_queue_create(captureQueueSpecific, 0);
        // Tag the queue so -isCurrentQueueCaptureQueue can identify it.
        dispatch_queue_set_specific(captureQueue, captureQueueSpecific,
                                    (void *)captureQueueSpecific, NULL);
    });
    return captureQueue;
}
/// YES when the caller is already executing on the capture queue, detected
/// via the queue-specific marker installed when the queue was created.
- (bool)isCurrentQueueCaptureQueue
{
    void *marker = dispatch_get_specific(captureQueueSpecific);
    return marker != NULL;
}
/// Runs `block` on the capture queue — inline when we are already on it,
/// otherwise dispatched sync/async as requested. Every invocation is wrapped
/// in its own autorelease pool because capture work creates many short-lived
/// objects.
- (void)dispatchOnCaptureQueue:(dispatch_block_t)block synchronous:(bool)synchronous
{
    dispatch_block_t pooledBlock = ^{
        @autoreleasepool {
            block();
        }
    };
    if ([self isCurrentQueueCaptureQueue]) {
        pooledBlock();
    } else if (synchronous) {
        dispatch_sync([self captureQueue], pooledBlock);
    } else {
        dispatch_async([self captureQueue], pooledBlock);
    }
}
/// Designated initializer. Builds the capture session, attaches a YUV 420
/// (bi-planar, full range) video data output, creates the preview layer and
/// subscribes to runtime-error and orientation notifications.
- (id)initWithDelegate:(id<CaptureDelegate>)owner
{
    // FIX: the original wrote `self == [super init]` (a comparison that
    // discards the result); the ObjC init idiom requires assignment.
    if (self = [super init]) {
        _owner = owner;
        _captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
        NSString* version = [[UIDevice currentDevice] systemVersion];
        if ([version integerValue] >= 7) {
            // Keep video capture from configuring the app-wide audio session.
            _captureSession.usesApplicationAudioSession = NO;
            _captureSession.automaticallyConfiguresApplicationAudioSession = NO;
        }
#endif
        _captureChanging = NO;
        _captureChangingCondition = [[NSCondition alloc] init];
        if (!_captureSession || !_captureChangingCondition) {
            return nil;
        }
        // Create and configure a new output (frames delivered via callbacks).
        AVCaptureVideoDataOutput* captureOutput = [[AVCaptureVideoDataOutput alloc] init];
        NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
        NSNumber* val = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
        NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:val forKey:key];
        [captureOutput setVideoSettings:videoSettings];
        [captureOutput setAlwaysDiscardsLateVideoFrames:YES];
        // Add the new output to the session.
        if ([_captureSession canAddOutput:captureOutput]) {
            [_captureSession addOutput:captureOutput];
        } else {
            // FIX: the original swallowed this failure with a bare comment.
            AVLogError(@"VideoCapture: cannot add video data output to session");
        }
        if (!_prevLayer) {
            _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
            _prevLayer.videoGravity = AVLayerVideoGravityResizeAspect;
        }
        [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
        NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
        [notify addObserver:self
                   selector:@selector(onVideoError:)
                       name:AVCaptureSessionRuntimeErrorNotification
                     object:_captureSession];
        [notify addObserver:self
                   selector:@selector(deviceOrientationDidChange:)
                       name:UIDeviceOrientationDidChangeNotification
                     object:nil];
    }
    return self;
}

// Routes sample-buffer callbacks to self on the capture queue.
- (void)directOutputToSelf {
    [[self currentOutput] setSampleBufferDelegate:self queue:[self captureQueue]];
}

// Detaches the sample-buffer delegate, stopping frame delivery.
- (void)directOutputToNil {
    [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
}
/// UIDeviceOrientationDidChangeNotification handler: records that at least
/// one real orientation change happened, then re-syncs the connection.
- (void)deviceOrientationDidChange:(NSNotification*)notification {
    _orientationHasChanged = YES;
    [self setRelativeVideoOrientation];
}
// Maps the physical device orientation onto the capture connection (and the
// preview layer) so frames arrive upright. Note the deliberate swap:
// device LandscapeLeft -> video LandscapeRight and vice versa, because
// UIDeviceOrientation and AVCaptureVideoOrientation are defined in opposite
// rotational senses.
- (void)setRelativeVideoOrientation {
    if (!_connection.supportsVideoOrientation) {
        return;
    }
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationPortrait:
            _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            _connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        case UIDeviceOrientationLandscapeLeft:
            _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
            break;
        case UIDeviceOrientationLandscapeRight:
            _connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIDeviceOrientationFaceUp:
        case UIDeviceOrientationFaceDown:
        case UIDeviceOrientationUnknown:
        default:
            // Flat/unknown: keep the current orientation unless none was ever
            // observed, in which case default to portrait.
            if (!_orientationHasChanged) {
                _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
            }
            break;
    }
    _prevLayer.connection.videoOrientation = _connection.videoOrientation;
}

- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    // FIX: balance the beginGeneratingDeviceOrientationNotifications call
    // made in -initWithDelegate: (was leaked, keeping the accelerometer on).
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
}

/// Switches to the front (YES) or back (NO) camera. Returns YES immediately
/// when the requested camera is already the active input.
- (BOOL)setCaptureDeviceByPosition:(BOOL)front
{
    [self waitForCaptureChangeToFinish];
    AVCaptureDevicePosition desiredPosition =
        front ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
    // Check whether the camera is already at the desired position.
    if (_captureSession) {
        NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
        if ([currentInputs count] > 0) {
            AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
            if ([currentInput.device position] == desiredPosition) {
                return YES;
            }
        }
    }
    return [self changeCaptureInputByPosition:desiredPosition];
}
/// Applies a new width/height/fps. No-op when the capability is unchanged
/// (compared via VideoCapability's C++ operator==). If the session is
/// running it is stopped, reconfigured and restarted.
/// FIX: guard against a NULL capability pointer (previously crashed on
/// dereference).
- (BOOL)setCaptureCapability:(struct VideoCapability*)capability
{
    if (!capability) {
        return NO;
    }
    if ((*capability) == _capability)
        return YES;
    BOOL restart = [_captureSession isRunning];
    if (restart) {
        [self stopCapture];
    }
    _capability = *capability;
    if (restart) {
        [self startCapture];
    }
    return YES;
}
/// Starts frame delivery. The actual session configuration and startup run
/// asynchronously on the capture queue; _captureChanging blocks any other
/// reconfiguration until -signalCaptureChangeEnd fires.
- (BOOL)startCapture
{
    [self waitForCaptureChangeToFinish];
    if (!_captureSession) {
        return NO;
    }
    AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
    if (!currentOutput)
        return NO;
    [self directOutputToSelf];
    _orientationHasChanged = NO;
    _captureChanging = YES;
    dispatch_async([self captureQueue], ^(void) {
        [self startCaptureInBackgroundWithOutput:currentOutput];
    });
    return YES;
}

// The single video data output added in -initWithDelegate:.
- (AVCaptureVideoDataOutput*)currentOutput {
    return [[_captureSession outputs] firstObject];
}

// Runs on the capture queue: applies the session preset and frame rate,
// wires the video connection, then starts the session.
- (void)startCaptureInBackgroundWithOutput:(AVCaptureVideoDataOutput*)currentOutput {
    // Begin configuration for the AVCaptureSession.
    [_captureSession beginConfiguration];
    // Configure the capture frame rate on the active input device.
    NSArray* sessionInputs = _captureSession.inputs;
    AVCaptureDeviceInput* deviceInput = [sessionInputs count] > 0 ? sessionInputs[0] : nil;
    AVCaptureDevice* inputDevice = deviceInput.device;
    if (inputDevice) {
        NSString* captureQuality = [self presetFromResolution:inputDevice];
        // Picture resolution.
        [_captureSession setSessionPreset:captureQuality];
        // FIX: guard fps > 0 (CMTimeMake with a 0 timescale is invalid) and
        // honor lockForConfiguration's result instead of ignoring it —
        // mutating an unlocked device throws.
        if (_capability.fps > 0) {
            CMTime tm = CMTimeMake(1, _capability.fps);
            NSError* lockError = nil;
            if ([inputDevice lockForConfiguration:&lockError]) {
                [inputDevice setActiveVideoMaxFrameDuration:tm];
                [inputDevice setActiveVideoMinFrameDuration:tm];
                [inputDevice unlockForConfiguration];
            } else {
                AVLogError(@"lockForConfiguration failed: %@", lockError);
            }
        }
    }
    _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
    [self setRelativeVideoOrientation];
    // Finished configuring, commit settings to AVCaptureSession.
    [_captureSession commitConfiguration];
    [_captureSession startRunning];
    [self signalCaptureChangeEnd];
}

// AVCaptureSessionRuntimeErrorNotification handler.
- (void)onVideoError:(NSNotification*)notification {
    AVLogError(@"onVideoError: %@", notification);
}

/// Stops frame delivery; the session itself is stopped asynchronously on the
/// capture queue.
- (BOOL)stopCapture {
    [self waitForCaptureChangeToFinish];
    [self directOutputToNil];
    if (!_captureSession) {
        return NO;
    }
    _orientationHasChanged = NO;
    _captureChanging = YES;
    dispatch_async([self captureQueue], ^(void) { [self stopCaptureInBackground]; });
    return YES;
}

// Runs on the capture queue.
- (void)stopCaptureInBackground {
    [_captureSession stopRunning];
    [self signalCaptureChangeEnd];
}
/// Number of attached video capture devices.
+ (int)captureDeviceCount {
    NSArray* videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    return (int)videoDevices.count;
}

/// First video device at the given position, or nil when none is attached.
/// NOTE(review): devicesWithMediaType: is deprecated since iOS 10 in favor of
/// AVCaptureDeviceDiscoverySession; kept for this file's deployment target.
+ (AVCaptureDevice*)captureDeviceForPosition:(AVCaptureDevicePosition)positon
{
    AVCaptureDevice* found = nil;
    for (AVCaptureDevice* device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == positon) {
            found = device;
            break;
        }
    }
    return found;
}
// Re-adds `input` to the session when possible. Used to recover the previous
// camera after a failed switch.
- (void)restorePreviousInput:(AVCaptureInput*)input {
    if (input && [_captureSession canAddInput:input]) {
        [_captureSession addInput:input];
    }
}

/// Replaces the session's current camera input with the device at `positon`,
/// restarting the session afterwards when it was running before the switch.
- (BOOL)changeCaptureInputByPosition:(AVCaptureDevicePosition)positon {
    BOOL restart = [_captureSession isRunning];
    if (restart) {
        [self stopCapture];
    }
    [self waitForCaptureChangeToFinish];
    NSArray* currentInputs = [_captureSession inputs];
    // Remove the current input, keeping a reference so it can be restored on
    // failure (FIX: the original left the session with no input at all when
    // the new camera could not be opened).
    AVCaptureInput* removedInput = nil;
    if ([currentInputs count] > 0) {
        removedInput = (AVCaptureInput*)[currentInputs objectAtIndex:0];
        [_captureSession removeInput:removedInput];
    }
    // Look for a capture device at the requested position.
    int captureDeviceCount = [VideoCapture captureDeviceCount];
    if (captureDeviceCount <= 0) {
        [self restorePreviousInput:removedInput];
        return NO;
    }
    AVCaptureDevice* captureDevice = [VideoCapture captureDeviceForPosition:positon];
    if (!captureDevice) {
        [self restorePreviousInput:removedInput];
        return NO;
    }
    // Create a capture session input out of the AVCaptureDevice.
    NSError* deviceError = nil;
    AVCaptureDeviceInput* newCaptureInput =
        [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
                                              error:&deviceError];
    if (!newCaptureInput) {
        const char* errorMessage = [[deviceError localizedDescription] UTF8String];
        UNUSED(errorMessage);
        // Opening the camera failed: report it and put the old input back.
        [self onError:Session_Errot_OpenCamera_Failed];
        [self restorePreviousInput:removedInput];
        return NO;
    }
    // Try to add the new capture device to the capture session.
    [_captureSession beginConfiguration];
    BOOL addedCaptureInput = NO;
    if ([_captureSession canAddInput:newCaptureInput]) {
        [_captureSession addInput:newCaptureInput];
        addedCaptureInput = YES;
    } else {
        // New input rejected: restore the previous one.
        [self restorePreviousInput:removedInput];
    }
    [_captureSession commitConfiguration];
    if (restart) {
        [self startCapture];
    }
    return addedCaptureInput;
}

// Sample-buffer callback (capture queue). Wraps the pixel buffer in the
// reusable NativeVideoFrame and hands it to the delegate synchronously; the
// frame is valid only for the duration of the callback, then Reset() runs.
- (void)captureOutput:(AVCaptureOutput*)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection*)connection {
    CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    _videoFrame.CreateFrame(videoFrame);
    // Report the actual captured size (may differ from the requested one).
    VideoCapability tempCaptureCapability;
    tempCaptureCapability.width = _videoFrame.width();
    tempCaptureCapability.height = _videoFrame.height();
    tempCaptureCapability.fps = _capability.fps;
    if (_owner) {
        [_owner rawFrame:&_videoFrame Capability:&tempCaptureCapability];
    }
    _videoFrame.Reset();
}

// Marks the pending capture-state change as finished and wakes any waiter.
- (void)signalCaptureChangeEnd {
    [_captureChangingCondition lock];
    _captureChanging = NO;
    [_captureChangingCondition signal];
    [_captureChangingCondition unlock];
}

// Blocks the caller until any in-flight start/stop/reconfigure completes.
- (void)waitForCaptureChangeToFinish {
    [_captureChangingCondition lock];
    while (_captureChanging) {
        [_captureChangingCondition wait];
    }
    [_captureChangingCondition unlock];
}
/// Preview layer created at init time; callers add it to their layer tree.
- (AVCaptureVideoPreviewLayer *)previewLayer
{
    return self.prevLayer;
}
// NOT IMPLEMENTED: the entire still-image path below is commented out, so
// this method is currently a no-op and NEVER invokes `complete` — callers
// must not block waiting for the callback. NOTE(review): the dead code
// references self.imageOutput and _takingPicture, neither of which exists in
// this file anymore; re-enabling requires restoring those members (or
// porting to AVCapturePhotoOutput). TODO: either implement, or call
// complete(nil, error) immediately so callers can fail fast.
- (void)takePicture:(int)size Complete:(void(^)(UIImage*, NSError*))complete
{/*[self dispatchOnCaptureQueue:^{do {if (!self.imageOutput)break;_takingPicture = YES;AVCaptureConnection *videoConnection = nil;for (AVCaptureConnection *connection in self.imageOutput.connections) {for (AVCaptureInputPort *port in [connection inputPorts]) {if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {videoConnection = connection;break;}}if (videoConnection) {break;}}// 拍照NSLog(@"Picture taking");[self.imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error){UIImage *image = nil;if (CMSampleBufferIsValid(imageSampleBuffer)){// 获取图片数据NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];image = [[UIImage alloc] initWithData:imageData];}dispatch_async([self captureQueue], ^{_takingPicture = NO;NSLog(@"Picture taken");if (complete){dispatch_async(dispatch_get_main_queue(), ^{complete(image, error);});}});}];return;} while (0);if (complete){dispatch_async(dispatch_get_main_queue(), ^{complete(nil, nil);});}} synchronous:false];*/
}
/// Builds a map from "W*H" strings to the matching AVCaptureSessionPreset for
/// every preset any attached camera supports.
+ (NSDictionary *)getResolutions
{
    NSMutableDictionary *dic = [[NSMutableDictionary alloc] init];
    for (AVCaptureDevice * videoDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset1920x1080]) {
            [dic setObject:AVCaptureSessionPreset1920x1080 forKey:@"1920*1080"];
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720]) {
            [dic setObject:AVCaptureSessionPreset1280x720 forKey:@"1280*720"];
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset640x480]) {
            [dic setObject:AVCaptureSessionPreset640x480 forKey:@"640*480"];
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset352x288]) {
            [dic setObject:AVCaptureSessionPreset352x288 forKey:@"352*288"];
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPresetLow]) {
            [dic setObject:AVCaptureSessionPresetLow forKey:@"192*144"];
        }
    }
    return dic;
}

/// Sorts the "W*H" keys of `dic` ascending by width, then height.
+ (NSArray*)sortedResolutionArray:(NSDictionary*)dic
{
    NSArray* arr = [[dic allKeys] sortedArrayUsingComparator:^NSComparisonResult(id obj1, id obj2) {
        NSString* str1 = obj1;
        NSArray *listItems1 = [str1 componentsSeparatedByString:@"*"];
        int w1 = [[listItems1 objectAtIndex:0] intValue];
        int h1 = [[listItems1 objectAtIndex:1] intValue];
        NSString* str2 = obj2;
        NSArray *listItems2 = [str2 componentsSeparatedByString:@"*"];
        int w2 = [[listItems2 objectAtIndex:0] intValue];
        int h2 = [[listItems2 objectAtIndex:1] intValue];
        if (w1 > w2) { return NSOrderedDescending; }
        if (w1 < w2) { return NSOrderedAscending; }
        if (h1 > h2) { return NSOrderedDescending; }
        if (h1 < h2) { return NSOrderedAscending; }
        return NSOrderedSame;
    }];
    return arr;
}

/// Resolves _capability's width/height to a session preset the device
/// supports. If the exact resolution is unsupported, warns the upper layer
/// and searches outward through the sorted resolution list for the closest
/// supported one, falling back to AVCaptureSessionPresetLow.
- (NSString *)presetFromResolution:(AVCaptureDevice*)device
{
    if (!self.resolutionDic)
        self.resolutionDic = [VideoCapture getResolutions];
    NSString* sessionPreset = [NSString stringWithFormat:@"%d*%d", _capability.width, _capability.height];
    NSString* DesiredPreset = [self.resolutionDic objectForKey:sessionPreset];
    if (DesiredPreset && [device supportsAVCaptureSessionPreset:DesiredPreset]) {
        // The device supports the requested resolution directly.
        return DesiredPreset;
    }
    // Requested resolution unsupported — notify the upper layer.
    [self onWarning:Session_Warning_VideoProfile_NotSupported];
    NSArray* arr = [VideoCapture sortedResolutionArray:self.resolutionDic];
    for (int i = 0; i < arr.count; ++i) {
        // FIX: compare string contents with isEqualToString: — the original
        // used pointer equality (==), which never matches a stringWithFormat:
        // result, so the closest-match search silently never ran.
        if ([[arr objectAtIndex:i] isEqualToString:sessionPreset]) {
            // Expand outward: j steps below (smaller) and above (larger).
            for (int j = 1; (i + j < arr.count || i - j >= 0); ++j) {
                NSString* preset = nil;
                if (i - j >= 0) {
                    preset = [arr objectAtIndex:i - j];
                    if ([device supportsAVCaptureSessionPreset:[self.resolutionDic objectForKey:preset]]) {
                        return [self.resolutionDic objectForKey:preset];
                    }
                }
                if (i + j < arr.count) {
                    preset = [arr objectAtIndex:i + j];
                    if ([device supportsAVCaptureSessionPreset:[self.resolutionDic objectForKey:preset]]) {
                        return [self.resolutionDic objectForKey:preset];
                    }
                }
            }
            break;
        }
    }
    return AVCaptureSessionPresetLow;
}

#pragma mark - Reporter

// Forwards a non-fatal warning to the delegate when it implements onWarning:.
- (void)onWarning:(AVSessionWaringCode)warning
{
    if (_owner && [_owner respondsToSelector:@selector(onWarning:)]) {
        [_owner onWarning:warning];
    }
}
// Forwards a fatal error to the delegate when it implements onError:.
- (void)onError:(AVSessionErrorCode)error
{
    id<CaptureDelegate> owner = _owner;
    if (owner && [owner respondsToSelector:@selector(onError:)]) {
        [owner onError:error];
    }
}
@end

转载请注明原地址,  谢谢!

源码地址:https://github.com/haowei8196/VideoEngineMgr

ios直播技术(1)-- 视频采集相关推荐

  1. iOS 直播专题2-音视频采集

    从设备(手机)的摄像头.MIC中采集音频.视频的原始数据 ios的音视频采集可以从AVFoundation框架里采集 视频采集 这里我们选取GPUImage来采集视频,因为这个框架集成了很多视频滤镜, ...

  2. iOS直播技术分享-音视频采集(一)

    1.iOS直播技术的流程 直播技术的流程大致可以分为几个步骤:数据采集.图像处理(实时滤镜).视频编码.封包.上传.云端(转码.录制.分发).直播播放器. 数据采集:通过摄像头和麦克风获得实时的音视频 ...

  3. iOS 直播技术及Demo

    要过年了,新年快乐,今天写一些关于iOS直播技术相关知识,及详细Demo介绍,首先请下载Demo Demo下载地址(点击跳转下载) 一.直播介绍 1.1.直播现状 近年来,直播越来越火,但直播技术却对 ...

  4. HTTP Live Streaming直播(iOS直播)技术分析与实现(转)

    HTTP Live Streaming直播(iOS直播)技术分析与实现 转载于:https://www.cnblogs.com/chen-jt/p/3885223.html

  5. HTTP Live Streaming直播源代码软件开发(iOS直播)技术分析与实现

    HLS技术要点分析 HTTP Live Streaming(HLS)是苹果公司(Apple Inc.)实现的基于HTTP的流媒体传输协议,可实现流媒体的直播和点播,主要应用在iOS系统,为iOS设备( ...

  6. iOS - 直播流程,视频推流,视频拉流,简介,SMTP、RTMP、HLS、 PLPlayerKit

    收藏笔记 1 . 音视频处理的一般流程: 数据采集→数据编码→数据传输(流媒体服务器) →解码数据→播放显示 1.数据采集: 摄像机及拾音器收集视频及音频数据,此时得到的为原始数据 涉及技术或协议: ...

  7. iOS直播技术分析与实现

    不经意间发现,两个月没写博客了 , 把最近的一些技术成果,总结成了文章,与大家分享. 视频直播技术要点分析 HTTP Live Streaming(HLS)是苹果公司(Apple Inc.)实现的基于 ...

  8. iOS直播技术学习笔记 直播总体概览(一)

    ####概述 直播的现状 2016年,是一个直播年.直播行业快速发展,同时也滋生了大大小小上千家相关的公司. 中国互联网络信息中心发布的报告显示,截至今年6月,我国网络直播用户规模达到3.25亿,占网 ...

  9. iOS 直播技术文档

    iOS 直播 个人项目可以参考+lflivekit+ljkplayer 第三方推荐使用金山云 im推荐使用容云 网络层(socket或st)负责传输,协议层(rtmp或hls)负责网络打包,封装层(f ...

最新文章

  1. SpringBoot高级篇MongoDB之修改基本使用姿势
  2. 第十六届全国大学生智能汽车竞赛总决赛 AI视觉组线上赛细则
  3. Cisco对中国CCIE认证下重手了
  4. 关于无法加载sass 模块问题。vue2.0中报错ERROR :scss can't resolve 'scss-loader'
  5. spark和HSQL的连接join方式
  6. Java IO: InputStream
  7. 阿里P8大佬亲自教你!Android内存泄漏总结,看看这篇文章吧!
  8. LeetCode——Word Break
  9. robo3t 连接服务器数据库_mongodb开启公共IP访问及Robo 3T连接远程mongodb服务器
  10. Windows下使用pthread
  11. 让axios读取本地模拟数据报错404: Error: Request failed with status code 404
  12. 科技互联网都有啥热点信息?爬取虎嗅5万篇文章告诉你
  13. 你未必知道的 WebRTC – 前世、今生、未来
  14. 让你快速逃离万能五笔的广告
  15. Java 2 实用教程(第5版)耿祥义版 习题七
  16. 微信小程序如何获取微信昵称和头像
  17. java中求两个数的最小公倍数,最大公约数的简便方法
  18. 【老九学堂】【C语言】CodeBlocks安装文档
  19. 变态级JAVA程序员面试32问(附答案)(转载)
  20. 教您用几何画板画相切的圆

热门文章

  1. 小嘿嘿之群体智能优化算法
  2. html5 3d原理,使用css创建三角形 使用CSS3创建3d四面体原理及代码(html5实践)
  3. 安全相关一:iPhone锁机清除密码
  4. 押注泛C端交易合规:高灯推动财税科技“底层思维”革新?
  5. 科技云报道:巨头下场,生态聚合,低代码不再是个伪命题
  6. 【一文学会文件上传】SpringBoot+form表单实现文件上传
  7. 绝地求生要java吗_《和平精英》还是《绝地求生》的手游吗?光子早就和我们说了答案...
  8. Vue中的computed是什么?怎么用?
  9. cass软件yy命令_CASS常用命令大全
  10. 如何用python画折线图的时候标出阴影柱状背景_python绘图的图线阴影添加的几种方法介绍...